Ponysearch/searx/engines/__init__.py

'''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with searx. If not, see <http://www.gnu.org/licenses/>.

(C) 2013- by Adam Tauber, <asciimoo@gmail.com>
'''

from os.path import realpath, dirname, splitext, join
from imp import load_source
from itertools import izip_longest, chain
from operator import itemgetter
from urlparse import urlparse
from datetime import datetime
import ConfigParser
import sys
import re

import grequests

from searx import settings

engine_dir = dirname(realpath(__file__))
searx_dir = join(engine_dir, '../../')

engines_config = ConfigParser.SafeConfigParser()
engines_config.read(join(searx_dir, 'engines.cfg'))

number_of_searches = 0

engines = {}

categories = {'general': []}


def load_module(filename):
    modname = splitext(filename)[0]
    if modname in sys.modules:
        del sys.modules[modname]
    filepath = join(engine_dir, filename)
    module = load_source(modname, filepath)
    module.name = modname
    return module
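

# The config loop below expects one section per engine instance. A minimal
# engines.cfg section might look like this (illustrative values; besides
# 'engine' and 'categories', any extra option is copied onto the engine
# module as an attribute by the setattr call below):
#
#   [duckduckgo]
#   engine = duckduckgo
#   categories = general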
if not engines_config.sections():
    print '[E] Error: no engines found. Edit your engines.cfg'
    exit(2)

for engine_config_name in engines_config.sections():
    engine_data = engines_config.options(engine_config_name)
    engine = load_module(engines_config.get(engine_config_name, 'engine') + '.py')
    engine.name = engine_config_name
    for param_name in engine_data:
        if param_name == 'engine':
            continue
        if param_name == 'categories':
            if engines_config.get(engine_config_name, param_name) == 'none':
                engine.categories = []
            else:
                engine.categories = map(str.strip,
                                        engines_config.get(engine_config_name, param_name).split(','))
            continue
        setattr(engine, param_name, engines_config.get(engine_config_name, param_name))
    for engine_attr in dir(engine):
        if engine_attr.startswith('_'):
            continue
        if getattr(engine, engine_attr) is None:
            print '[E] Engine config error: Missing attribute "{0}.{1}"'.format(engine.name, engine_attr)
            sys.exit(1)
    engines[engine.name] = engine
    engine.stats = {'result_count': 0,
                    'search_count': 0,
                    'page_load_time': 0,
                    'score_count': 0,
                    'errors': 0}
    if hasattr(engine, 'categories'):
        for category_name in engine.categories:
            categories.setdefault(category_name, []).append(engine)
    else:
        categories['general'].append(engine)
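
# Note on the validation above: any public engine attribute that is still
# None after the cfg options have been applied is treated as a required
# setting the user forgot, so startup aborts instead of failing mid-search.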


def default_request_params():
    return {'method': 'GET', 'headers': {}, 'data': {}, 'url': '', 'cookies': {}}
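
# Each engine module's request(query, params) hook receives these defaults
# and is expected to fill in at least 'url' (and 'data' for POST engines)
# before search() below turns the dict into a grequests call.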
2013-10-15 18:19:06 +02:00

def make_callback(engine_name, results, suggestions, callback, params):
    # creating a callback wrapper for the search engine results
    def process_callback(response, **kwargs):
        cb_res = []
        response.search_params = params
        engines[engine_name].stats['page_load_time'] += \
            (datetime.now() - params['started']).total_seconds()
        try:
            search_results = callback(response)
        except Exception, e:
            engines[engine_name].stats['errors'] += 1
            results[engine_name] = cb_res
            print '[E] Error with engine "{0}":\n\t{1}'.format(engine_name, str(e))
            return
        for result in search_results:
            result['engine'] = engine_name
            if 'suggestion' in result:
                # TODO type checks
                suggestions.add(result['suggestion'])
                continue
            cb_res.append(result)
        results[engine_name] = cb_res
    return process_callback
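
# Note: process_callback runs as a requests response hook inside grequests'
# gevent greenlets, so every engine writes its result list into the shared
# `results` dict concurrently; greenlets only switch on I/O, which keeps
# this safe without explicit locking.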


def highlight_content(content, query):
    if not content:
        return None
    # ignoring html contents
    # TODO better html content detection
    if content.find('<') != -1:
        return content
    query = query.decode('utf-8')
    if content.lower().find(query.lower()) > -1:
        query_regex = u'({0})'.format(re.escape(query))
        content = re.sub(query_regex, '<b>\\1</b>', content, flags=re.I | re.U)
    else:
        regex_parts = []
        for chunk in query.split():
            if len(chunk) == 1:
                regex_parts.append(u'\W+{0}\W+'.format(re.escape(chunk)))
            else:
                regex_parts.append(u'{0}'.format(re.escape(chunk)))
        query_regex = u'({0})'.format('|'.join(regex_parts))
        content = re.sub(query_regex, '<b>\\1</b>', content, flags=re.I | re.U)
    return content
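
# For example (a sketch; the query arrives as a UTF-8 byte string from
# search() below):
#
#   highlight_content('free software search', 'software')
#   # -> u'free <b>software</b> search'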


def score_results(results):
    flat_res = filter(None, chain.from_iterable(izip_longest(*results.values())))
    flat_len = len(flat_res)
    engines_len = len(results)
    results = []
    # deduplication + scoring
    for i, res in enumerate(flat_res):
        res['parsed_url'] = urlparse(res['url'])
        res['engines'] = [res['engine']]
        weight = 1.0
        if hasattr(engines[res['engine']], 'weight'):
            weight = float(engines[res['engine']].weight)
        elif res['engine'] in settings.weights:
            weight = float(settings.weights[res['engine']])
        score = int((flat_len - i) / engines_len) * weight + 1
        duplicated = False
        for new_res in results:
            p1 = res['parsed_url'].path[:-1] if res['parsed_url'].path.endswith('/') else res['parsed_url'].path
            p2 = new_res['parsed_url'].path[:-1] if new_res['parsed_url'].path.endswith('/') else new_res['parsed_url'].path
            if res['parsed_url'].netloc == new_res['parsed_url'].netloc and\
               p1 == p2 and\
               res['parsed_url'].query == new_res['parsed_url'].query and\
               res.get('template') == new_res.get('template'):
                duplicated = new_res
                break
        if duplicated:
            if len(res.get('content', '')) > len(duplicated.get('content', '')):
                duplicated['content'] = res['content']
            duplicated['score'] += score
            duplicated['engines'].append(res['engine'])
            if duplicated['parsed_url'].scheme == 'https':
                continue
            elif res['parsed_url'].scheme == 'https':
                duplicated['url'] = res['parsed_url'].geturl()
                duplicated['parsed_url'] = res['parsed_url']
        else:
            res['score'] = score
            results.append(res)
    return sorted(results, key=itemgetter('score'), reverse=True)
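
# A worked example of the position-based score above (illustrative numbers):
# with engines_len = 2 and flat_len = 6, the result at interleaved index 0
# gets int((6 - 0) / 2) * 1.0 + 1 = 4.0 with the default weight, while the
# result at index 5 gets int((6 - 5) / 2) * 1.0 + 1 = 1.0 (integer division).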


def search(query, request, selected_engines):
    global engines, categories, number_of_searches
    requests = []
    results = {}
    suggestions = set()
    number_of_searches += 1
    user_agent = request.headers.get('User-Agent', '')

    for selected_engine in selected_engines:
        if selected_engine['name'] not in engines:
            continue

        engine = engines[selected_engine['name']]

        request_params = default_request_params()
        request_params['headers']['User-Agent'] = user_agent
        request_params['category'] = selected_engine['category']
        request_params['started'] = datetime.now()
        request_params = engine.request(query, request_params)

        callback = make_callback(selected_engine['name'], results, suggestions, engine.response, request_params)

        request_args = dict(headers = request_params['headers']
                           ,hooks = dict(response=callback)
                           ,cookies = request_params['cookies']
                           ,timeout = settings.request_timeout
                           )

        if request_params['method'] == 'GET':
            req = grequests.get
        else:
            req = grequests.post
            request_args['data'] = request_params['data']

        # ignoring empty urls
        if not request_params['url']:
            continue

        requests.append(req(request_params['url'], **request_args))

    grequests.map(requests)

    for engine_name, engine_results in results.items():
        engines[engine_name].stats['search_count'] += 1
        engines[engine_name].stats['result_count'] += len(engine_results)

    results = score_results(results)

    for result in results:
        if 'content' in result:
            result['content'] = highlight_content(result['content'], query)
        for res_engine in result['engines']:
            engines[res_engine].stats['score_count'] += result['score']

    return results, suggestions
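
# A usage sketch (hypothetical values; `request` is the incoming Flask
# request object and the engine name must match a section in engines.cfg):
#
#   results, suggestions = search('free software',
#                                 request,
#                                 [{'name': 'duckduckgo', 'category': 'general'}])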


def get_engines_stats():
    # TODO refactor
    pageloads = []
    results = []
    scores = []
    errors = []
    scores_per_result = []

    max_pageload = max_results = max_score = max_errors = max_score_per_result = 0
    for engine in engines.values():
        if engine.stats['search_count'] == 0:
            continue
        results_num = engine.stats['result_count'] / float(engine.stats['search_count'])
        load_times = engine.stats['page_load_time'] / float(engine.stats['search_count'])
        if results_num:
            score = engine.stats['score_count'] / float(engine.stats['search_count'])
            score_per_result = score / results_num
        else:
            score = score_per_result = 0.0
        max_results = max(results_num, max_results)
        max_pageload = max(load_times, max_pageload)
        max_score = max(score, max_score)
        max_score_per_result = max(score_per_result, max_score_per_result)
        max_errors = max(max_errors, engine.stats['errors'])
        pageloads.append({'avg': load_times, 'name': engine.name})
        results.append({'avg': results_num, 'name': engine.name})
        scores.append({'avg': score, 'name': engine.name})
        errors.append({'avg': engine.stats['errors'], 'name': engine.name})
        scores_per_result.append({'avg': score_per_result, 'name': engine.name})

    for engine in pageloads:
        engine['percentage'] = int(engine['avg'] / max_pageload * 100)
    for engine in results:
        engine['percentage'] = int(engine['avg'] / max_results * 100)
    for engine in scores:
        engine['percentage'] = int(engine['avg'] / max_score * 100)
    for engine in scores_per_result:
        engine['percentage'] = int(engine['avg'] / max_score_per_result * 100)
    for engine in errors:
        if max_errors:
            engine['percentage'] = int(float(engine['avg']) / max_errors * 100)
        else:
            engine['percentage'] = 0

    return [('Page loads (sec)', sorted(pageloads, key=itemgetter('avg')))
            ,('Number of results', sorted(results, key=itemgetter('avg'), reverse=True))
            ,('Scores', sorted(scores, key=itemgetter('avg'), reverse=True))
            ,('Scores per result', sorted(scores_per_result, key=itemgetter('avg'), reverse=True))
            ,('Errors', sorted(errors, key=itemgetter('avg'), reverse=True))
            ]
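
# The returned list of (title, rows) pairs is ready for rendering (each row
# carries 'name', 'avg' and 'percentage' keys), presumably by a stats page
# that draws the percentages as bars.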