2013-10-14 23:09:13 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
2013-10-15 00:33:18 +02:00
|
|
|
'''
|
|
|
|
searx is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU Affero General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
searx is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU Affero General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU Affero General Public License
|
|
|
|
along with searx. If not, see < http://www.gnu.org/licenses/ >.
|
|
|
|
|
|
|
|
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
|
|
|
|
'''
|
|
|
|
|
2013-12-01 23:52:49 +01:00
|
|
|
import os
|
2013-10-14 23:09:13 +02:00
|
|
|
if __name__ == "__main__":
    from sys import path
    # When executed directly as a script, add the parent directory to the
    # import path so the `searx` package itself can be imported below.
    path.append(os.path.realpath(os.path.dirname(os.path.realpath(__file__))+'/../'))
|
2013-10-14 23:09:13 +02:00
|
|
|
|
2014-01-01 23:04:13 +01:00
|
|
|
from flask import Flask, request, render_template, url_for, Response, make_response, redirect
|
2013-11-04 00:21:27 +01:00
|
|
|
from searx.engines import search, categories, engines, get_engines_stats
|
2013-10-19 16:18:41 +02:00
|
|
|
from searx import settings
|
2013-10-17 00:30:41 +02:00
|
|
|
import json
|
2013-11-15 18:55:18 +01:00
|
|
|
import cStringIO
|
|
|
|
from searx.utils import UnicodeWriter
|
2013-12-01 23:52:49 +01:00
|
|
|
from flask import send_from_directory
|
|
|
|
|
2013-10-14 23:09:13 +02:00
|
|
|
|
|
|
|
|
|
|
|
# The WSGI application object served by Flask.
app = Flask(__name__)
# Secret key used to sign session cookies; value comes from the searx settings.
app.secret_key = settings.secret_key
|
2013-10-14 23:09:13 +02:00
|
|
|
|
2013-10-16 00:01:08 +02:00
|
|
|
opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
|
|
|
|
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
|
|
|
|
<ShortName>searx</ShortName>
|
|
|
|
<Description>Search searx</Description>
|
|
|
|
<InputEncoding>UTF-8</InputEncoding>
|
|
|
|
<LongName>searx meta search engine</LongName>
|
2013-10-20 22:37:55 +02:00
|
|
|
<Url type="text/html" method="{method}" template="{host}">
|
2013-10-16 00:01:08 +02:00
|
|
|
<Param name="q" value="{{searchTerms}}" />
|
|
|
|
</Url>
|
|
|
|
</OpenSearchDescription>
|
|
|
|
'''
|
|
|
|
|
2013-10-15 20:50:12 +02:00
|
|
|
def render(template_name, **kwargs):
    '''Render *template_name* with the common template context.

    Always injects the sorted category list. When the caller did not supply
    ``selected_categories``, restores the user's choice from the
    ``categories`` cookie, falling back to ``['general']``.
    '''
    global categories
    kwargs['categories'] = sorted(categories.keys())
    if 'selected_categories' not in kwargs:
        # Keep only cookie entries that are still valid category names.
        cookie_categories = request.cookies.get('categories', '').split(',')
        kwargs['selected_categories'] = [ccateg for ccateg in cookie_categories
                                         if ccateg in categories]
        if not kwargs['selected_categories']:
            kwargs['selected_categories'] = ['general']
    return render_template(template_name, **kwargs)
|
|
|
|
|
2013-11-04 00:21:27 +01:00
|
|
|
def parse_query(query):
    '''Split an explicit engine selection off the front of *query*.

    A leading token of the form ``-enginename`` (where the name is a known
    engine) is removed from the query and returned as a selected engine.

    Returns a ``(query, query_engines)`` tuple where ``query_engines`` is a
    list of ``{'category': ..., 'name': ...}`` dicts (empty if no engine
    prefix was given).
    '''
    query_engines = []
    query_parts = query.split()
    # Guard: an empty or whitespace-only query has no parts; indexing
    # query_parts[0] below would raise IndexError.
    if not query_parts:
        return query, query_engines
    if query_parts[0].startswith('-') and query_parts[0][1:] in engines:
        # TODO: resolve the real category of the selected engine.
        query_engines.append({'category': 'TODO', 'name': query_parts[0][1:]})
        query = query.replace(query_parts[0], '', 1).strip()
    return query, query_engines
|
|
|
|
|
2013-10-14 23:09:13 +02:00
|
|
|
@app.route('/', methods=['GET', 'POST'])
def index():
    '''Main search endpoint.

    Reads the query from GET args or POST form data, resolves which engines
    to query (explicit "-engine" prefix, category checkboxes, the categories
    cookie, or the 'general' fallback), runs the search, and renders the
    results as HTML, JSON or CSV depending on the 'format' parameter.
    '''
    global categories

    # POST and GET deliver parameters through different request attributes.
    if request.method=='POST':
        request_data = request.form
    else:
        request_data = request.args
    if not request_data.get('q'):
        # No query at all -> show the start page.
        return render('index.html')

    selected_categories = []

    # NOTE(review): the .encode('utf-8') indicates Python 2 byte-string
    # handling downstream — confirm before porting.
    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))

    # Only fall back to category-based engine selection when the query did
    # not already name an engine explicitly via the "-engine" prefix.
    if not len(selected_engines):
        # 1) categories ticked in the search form ('category_<name>' fields).
        for pd_name,pd in request_data.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if not category in categories:
                    continue
                selected_categories.append(category)
        # 2) categories remembered in the user's cookie.
        if not len(selected_categories):
            cookie_categories = request.cookies.get('categories', '').split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    selected_categories.append(ccateg)
        # 3) last resort: the 'general' category.
        if not len(selected_categories):
            selected_categories = ['general']

        # Expand the chosen categories into concrete engine selections.
        for categ in selected_categories:
            selected_engines.extend({'category': categ, 'name': x.name} for x in categories[categ])

    results, suggestions = search(query, request, selected_engines)

    # Shorten overly long URLs for display: keep the head and the tail.
    for result in results:
        if len(result['url']) > 74:
            result['pretty_url'] = result['url'][:35] + '[..]' + result['url'][-35:]
        else:
            result['pretty_url'] = result['url']

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}), mimetype='application/json')
    elif request_data.get('format') == 'csv':
        # Emit the results as a CSV attachment named after the query terms.
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if len(results):
            csv.writerow(keys)
            for row in results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        response.headers.add('Content-Disposition', 'attachment;Filename=searx_-_{0}.csv'.format('_'.join(query.split())))
        return response

    # Default: regular HTML result page.
    return render('results.html'
                  ,results=results
                  ,q=request_data['q']
                  ,selected_categories=selected_categories
                  ,number_of_results=len(results)
                  ,suggestions=suggestions
                  )
|
|
|
|
|
2013-10-14 23:09:13 +02:00
|
|
|
|
2013-10-21 00:28:48 +02:00
|
|
|
@app.route('/about', methods=['GET'])
def about():
    """Serve the static "about" page with the category/engine listing."""
    category_listing = categories.items()
    return render('about.html', categs=category_listing)
|
|
|
|
|
2014-01-01 22:16:53 +01:00
|
|
|
|
|
|
|
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    '''Show and store per-user preferences.

    On POST, collect the checked ``category_<name>`` form fields and persist
    them in the ``categories`` cookie, then redirect to the start page. On
    GET (or when nothing valid was selected) render the preferences form.
    '''
    if request.method == 'POST':
        selected_categories = []
        # Form fields are named 'category_<name>'; only the names matter.
        for pd_name in request.form:
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
        if selected_categories:
            resp = make_response(redirect('/'))
            # cookie max age: 4 weeks
            resp.set_cookie('categories', ','.join(selected_categories), max_age=60*60*24*7*4)
            return resp
    return render('preferences.html')
|
|
|
|
|
|
|
|
|
2013-10-27 01:03:05 +02:00
|
|
|
@app.route('/stats', methods=['GET'])
def stats():
    '''Render the engine statistics page.'''
    # The original declared `global categories` although the name is never
    # used in this view; the dead declaration has been dropped. The local is
    # renamed so it no longer shadows this view function.
    engine_stats = get_engines_stats()
    return render('stats.html', stats=engine_stats)
|
|
|
|
|
2014-01-01 22:16:53 +01:00
|
|
|
|
2013-12-01 16:10:38 +01:00
|
|
|
@app.route('/robots.txt', methods=['GET'])
def robots():
    """Serve a static robots.txt: allow the main pages, hide /stats."""
    robots_rules = """User-agent: *
Allow: /
Allow: /about
Disallow: /stats
"""
    return Response(robots_rules, mimetype='text/plain')
|
|
|
|
|
2014-01-01 22:16:53 +01:00
|
|
|
|
2013-10-16 00:01:08 +02:00
|
|
|
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    '''Serve the OpenSearch description document.

    Uses HTTP GET for webkit-based user agents (Chrome/Chromium only support
    GET for OpenSearch) and POST otherwise. The template URL is taken from
    the configured ``base_url`` when set, otherwise built from this
    request's own scheme and host.
    '''
    method = 'post'
    scheme = 'http'
    # chrome/chromium only supports HTTP GET....
    if 'webkit' in request.headers.get('User-Agent', '').lower():
        method = 'get'
    if request.is_secure:
        scheme = 'https'
    if settings.base_url:
        hostname = settings.base_url
    else:
        hostname = url_for('index', _external=True, _scheme=scheme)
    ret = opensearch_xml.format(method=method, host=hostname)
    resp = Response(response=ret,
                    status=200,
                    mimetype="application/xml")
    return resp
|
|
|
|
|
2013-12-01 23:52:49 +01:00
|
|
|
@app.route('/favicon.ico')
def favicon():
    """Serve the site icon from the bundled static images."""
    image_directory = os.path.join(app.root_path, 'static/img')
    return send_from_directory(image_directory,
                               'favicon.png', mimetype='image/vnd.microsoft.icon')
|
|
|
|
|
|
|
|
|
2013-10-14 23:09:13 +02:00
|
|
|
if __name__ == "__main__":
    from gevent import monkey
    # Patch the stdlib for cooperative (gevent) I/O before starting Flask,
    # so blocking network calls in the engines yield instead of stalling.
    monkey.patch_all()

    # Debug mode, debugger and port all come from the searx settings module.
    app.run(debug = settings.debug
           ,use_debugger = settings.debug
           ,port = settings.port
           )
|