Ponysearch/searx/webapp.py

334 lines
11 KiB
Python
Raw Normal View History

2013-10-14 23:09:13 +02:00
#!/usr/bin/env python
2013-10-15 00:33:18 +02:00
'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''
2014-01-21 21:28:54 +01:00
import json
import cStringIO
import os
2014-01-20 02:31:20 +01:00
from flask import Flask, request, render_template
from flask import url_for, Response, make_response, redirect
from flask import send_from_directory
from searx import settings
2013-11-04 00:21:27 +01:00
from searx.engines import search, categories, engines, get_engines_stats
2013-11-15 18:55:18 +01:00
from searx.utils import UnicodeWriter
2014-01-10 23:38:08 +01:00
from searx.utils import highlight_content, html_to_text
2013-12-01 23:52:49 +01:00
from flask.ext.babel import Babel
2013-10-14 23:09:13 +02:00
# Flask application setup: static and template folders live next to this
# module so the app works regardless of the current working directory.
app = Flask(
    __name__,
    static_folder=os.path.join(os.path.dirname(__file__), 'static'),
    template_folder=os.path.join(os.path.dirname(__file__), 'templates')
)
# secret_key signs the session cookie; value comes from settings.yml
app.secret_key = settings['server']['secret_key']

# i18n support; locale selection is done by get_locale() below
babel = Babel(app)
2014-01-19 23:04:09 +01:00
# Engines for which a favicon image ships in static/img.
# TODO configurable via settings.yml
favicons = ['wikipedia', 'youtube', 'vimeo', 'soundcloud',
            'twitter', 'stackoverflow', 'github']

# OpenSearch description template; {method} and {host} are filled in by
# the /opensearch.xml handler, {{searchTerms}} stays literal for the client.
opensearch_xml = '''<?xml version="1.0" encoding="utf-8"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
<ShortName>searx</ShortName>
<Description>Search searx</Description>
<InputEncoding>UTF-8</InputEncoding>
<LongName>searx meta search engine</LongName>
<Url type="text/html" method="{method}" template="{host}">
<Param name="q" value="{{searchTerms}}" />
</Url>
</OpenSearchDescription>
'''
2014-01-14 18:17:19 +01:00
@babel.localeselector
def get_locale():
    """Select the UI locale for the current request.

    Precedence (lowest to highest): Accept-Language header, 'locale'
    cookie, 'locale' URL argument, 'locale' form field.  Only locales
    listed in settings['locales'] are accepted.
    """
    locale = request.accept_languages.best_match(settings['locales'].keys())
    if request.cookies.get('locale', '') in settings['locales']:
        locale = request.cookies.get('locale', '')
    if 'locale' in request.args\
       and request.args['locale'] in settings['locales']:
        locale = request.args['locale']
    if 'locale' in request.form\
       and request.form['locale'] in settings['locales']:
        locale = request.form['locale']
    return locale
2014-01-14 18:17:19 +01:00
def get_base_url():
    """Return the external base URL of this instance.

    Uses settings['server']['base_url'] when configured, otherwise builds
    the URL from the current request (honouring https on secure requests).
    """
    if settings['server']['base_url']:
        hostname = settings['server']['base_url']
    else:
        scheme = 'http'
        if request.is_secure:
            scheme = 'https'
        hostname = url_for('index', _external=True, _scheme=scheme)
    return hostname
2013-10-15 20:50:12 +02:00
def render(template_name, **kwargs):
    """Render a template with category data injected into the context.

    Always sets kwargs['categories'] ('general' first, rest sorted).  If
    the caller did not pass 'selected_categories', they are read from the
    'categories' cookie, falling back to ['general'].
    """
    kwargs['categories'] = ['general']
    kwargs['categories'].extend(x for x in
                                sorted(categories.keys()) if x != 'general')
    if 'selected_categories' not in kwargs:
        kwargs['selected_categories'] = []
        cookie_categories = request.cookies.get('categories', '').split(',')
        for ccateg in cookie_categories:
            if ccateg in categories:
                kwargs['selected_categories'].append(ccateg)
        if not len(kwargs['selected_categories']):
            kwargs['selected_categories'] = ['general']
    return render_template(template_name, **kwargs)
2013-11-04 00:21:27 +01:00
def parse_query(query):
    """Parse a leading engine/category selector out of a search query.

    '-engine_name ...' selects a single engine, '!category_name ...'
    selects every engine of a category (underscores stand for spaces in
    the selector).  Returns (query, selected_engines): the query with a
    matched selector stripped, and a list of {'category', 'name'} dicts
    (empty when no selector matched).
    """
    query_engines = []
    query_parts = query.split()
    # guard: an empty or whitespace-only query has no selector to parse
    # (query_parts[0] would raise IndexError)
    if not query_parts:
        return query, query_engines
    if query_parts[0].startswith('-'):
        engine_name = query_parts[0][1:].replace('_', ' ')
        if engine_name in engines:
            # NOTE(review): lookup uses the underscore->space form but the
            # stored name keeps underscores — confirm downstream expects
            # the raw form
            query_engines.append({'category': 'none',
                                  'name': query_parts[0][1:]})
    elif query_parts[0].startswith('!'):
        category_name = query_parts[0][1:].replace('_', ' ')
        if category_name in categories:
            query_engines.extend({'category': category_name,
                                  'name': engine.name}
                                 for engine in categories[category_name])
    if len(query_engines):
        query = query.replace(query_parts[0], '', 1).strip()
    return query, query_engines
2014-01-14 18:17:19 +01:00
2014-01-14 18:19:21 +01:00
@app.route('/', methods=['GET', 'POST'])
def index():
    """Handle a search request and render the results.

    Reads parameters from the POST form or GET args.  Without a 'q'
    parameter (or with an invalid 'pageno') the plain index page is
    rendered.  Output format is chosen by the 'format' parameter:
    'json', 'csv', 'rss', or (default) the html results page.
    """
    paging = False
    if request.method == 'POST':
        request_data = request.form
    else:
        request_data = request.args
    if not request_data.get('q'):
        return render('index.html')

    # reject non-numeric or non-positive page numbers
    pageno_param = request_data.get('pageno', '1')
    if not pageno_param.isdigit() or int(pageno_param) < 1:
        return render('index.html')
    pageno = int(pageno_param)

    selected_categories = []

    query, selected_engines = parse_query(request_data['q'].encode('utf-8'))
    if len(selected_engines):
        # query contained a !category / -engine selector
        selected_categories = list(set(engine['category']
                                       for engine in selected_engines))
    else:
        # category selection: explicit form fields, then cookie, then default
        for pd_name, pd in request_data.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
        if not len(selected_categories):
            cookie_categories = request.cookies.get('categories', '')
            cookie_categories = cookie_categories.split(',')
            for ccateg in cookie_categories:
                if ccateg in categories:
                    selected_categories.append(ccateg)
        if not len(selected_categories):
            selected_categories = ['general']
        for categ in selected_categories:
            selected_engines.extend({'category': categ,
                                     'name': x.name}
                                    for x in categories[categ])

    results, suggestions = search(query, request, selected_engines, pageno)

    for result in results:
        # enable paging links as soon as one used engine supports paging
        if not paging and engines[result['engine']].paging:
            paging = True
        if request_data.get('format', 'html') == 'html':
            if 'content' in result:
                result['content'] = highlight_content(result['content'], query)
            result['title'] = highlight_content(result['title'], query)
        else:
            # non-html output: strip markup from content and title
            if 'content' in result:
                result['content'] = html_to_text(result['content']).strip()
            result['title'] = html_to_text(result['title']).strip()
        # shorten over-long urls for display
        if len(result['url']) > 74:
            url_parts = result['url'][:35], result['url'][-35:]
            result['pretty_url'] = '{0}[...]{1}'.format(*url_parts)
        else:
            result['pretty_url'] = result['url']
        for engine in result['engines']:
            if engine in favicons:
                result['favicon'] = engine

    if request_data.get('format') == 'json':
        return Response(json.dumps({'query': query, 'results': results}),
                        mimetype='application/json')
    elif request_data.get('format') == 'csv':
        csv = UnicodeWriter(cStringIO.StringIO())
        keys = ('title', 'url', 'content', 'host', 'engine', 'score')
        if len(results):
            csv.writerow(keys)
            for row in results:
                row['host'] = row['parsed_url'].netloc
                csv.writerow([row.get(key, '') for key in keys])
        csv.stream.seek(0)
        response = Response(csv.stream.read(), mimetype='application/csv')
        content_disp = 'attachment;Filename=searx_-_{0}.csv'.format(query)
        response.headers.add('Content-Disposition', content_disp)
        return response
    elif request_data.get('format') == 'rss':
        response_rss = render(
            'opensearch_response_rss.xml',
            results=results,
            q=request_data['q'],
            number_of_results=len(results),
            base_url=get_base_url()
        )
        return Response(response_rss, mimetype='text/xml')

    return render(
        'results.html',
        results=results,
        q=request_data['q'],
        selected_categories=selected_categories,
        paging=paging,
        pageno=pageno,
        suggestions=suggestions
    )
2014-01-01 22:16:53 +01:00
2013-10-14 23:09:13 +02:00
2013-10-21 00:28:48 +02:00
@app.route('/about', methods=['GET'])
def about():
    """Render the static about page."""
    return render('about.html')
@app.route('/engines', methods=['GET'])
def list_engines():
    """Render the engine list grouped by category."""
    # read-only access to the module-level categories dict; no global needed
    return render('engines.html', categs=categories.items())
2013-10-21 00:28:48 +02:00
2014-01-01 22:16:53 +01:00
@app.route('/preferences', methods=['GET', 'POST'])
def preferences():
    """Show or save user preferences.

    POST stores the selected categories and locale in cookies and
    redirects to the index page; GET renders the preferences form.
    """
    if request.method == 'POST':
        selected_categories = []
        locale = None
        for pd_name, pd in request.form.items():
            if pd_name.startswith('category_'):
                category = pd_name[9:]
                if category not in categories:
                    continue
                selected_categories.append(category)
            elif pd_name == 'locale' and pd in settings['locales']:
                locale = pd
        resp = make_response(redirect('/'))
        # cookie max age: 4 weeks
        cookie_max_age = 60 * 60 * 24 * 7 * 4
        if locale:
            resp.set_cookie(
                'locale', locale,
                max_age=cookie_max_age
            )
        if selected_categories:
            resp.set_cookie(
                'categories', ','.join(selected_categories),
                max_age=cookie_max_age
            )
        return resp
    return render('preferences.html',
                  locales=settings['locales'],
                  current_locale=get_locale())
2014-01-01 22:16:53 +01:00
2013-10-27 01:03:05 +02:00
@app.route('/stats', methods=['GET'])
def stats():
    """Render per-engine usage statistics."""
    # the original declared `global categories` but never used it; removed
    engine_stats = get_engines_stats()
    return render('stats.html', stats=engine_stats)
2014-01-01 22:16:53 +01:00
2013-12-01 16:10:38 +01:00
@app.route('/robots.txt', methods=['GET'])
def robots():
    """Serve robots.txt: allow the public pages, hide /stats and /engines."""
    return Response("""User-agent: *
Allow: /
Allow: /about
Disallow: /stats
Disallow: /engines
""", mimetype='text/plain')
2014-01-01 22:16:53 +01:00
2013-10-16 00:01:08 +02:00
@app.route('/opensearch.xml', methods=['GET'])
def opensearch():
    """Serve the OpenSearch description document for this instance."""
    method = 'post'
    # chrome/chromium only supports HTTP GET....
    if request.headers.get('User-Agent', '').lower().find('webkit') >= 0:
        method = 'get'
    base_url = get_base_url()
    # opensearch_xml is only read, so no global declaration is needed
    ret = opensearch_xml.format(method=method, host=base_url)
    resp = Response(response=ret,
                    status=200,
                    mimetype="application/xml")
    return resp
2013-12-01 23:52:49 +01:00
@app.route('/favicon.ico')
def favicon():
    """Serve the site favicon from the static image folder."""
    return send_from_directory(os.path.join(app.root_path, 'static/img'),
                               'favicon.png',
                               mimetype='image/vnd.microsoft.icon')
2013-12-01 23:52:49 +01:00
def run():
    """Start the development server with gevent monkey-patching applied."""
    # import here so the module can be loaded without gevent installed
    from gevent import monkey
    monkey.patch_all()
    app.run(
        debug=settings['server']['debug'],
        use_debugger=settings['server']['debug'],
        port=settings['server']['port']
    )


if __name__ == "__main__":
    run()