forked from Ponysearch/Ponysearch
[fix] proper search timeout handling
This commit is contained in:
parent f5aec98cbd
commit a5324d9d82
1 changed file with 19 additions and 6 deletions
@@ -21,6 +21,8 @@ import re
 from itertools import izip_longest, chain
 from datetime import datetime
 from operator import itemgetter
+from Queue import Queue
+from time import time
 from urlparse import urlparse, unquote
 from searx.engines import (
     categories, engines
@@ -34,6 +36,8 @@ number_of_searches = 0
 
 
 def threaded_requests(requests):
+    timeout_limit = max(r[2]['timeout'] for r in requests)
+    search_start = time()
     for fn, url, request_args in requests:
         th = threading.Thread(
             target=fn,
@@ -45,7 +49,11 @@ def threaded_requests(requests):
 
     for th in threading.enumerate():
         if th.name == 'search_request':
-            th.join()
+            remaining_time = max(0.0, timeout_limit - (time() - search_start))
+            th.join(remaining_time)
+            if th.isAlive():
+                print('engine timeout')
+
 
 
 # get default reqest parameter
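Note: a minimal standalone sketch (not from the commit) of the deadline logic the hunk above introduces. Every join() is given only whatever remains of one shared time budget, so the total wait across all engine threads never exceeds timeout_limit. The patched code is Python 2 and uses isAlive(); the sketch below uses the Python 3 spelling is_alive(), and the names wait_with_deadline/workers are illustrative.

import threading
import time


def wait_with_deadline(threads, timeout_limit):
    # Join every thread, but spend at most timeout_limit seconds in total:
    # time already spent waiting on earlier threads counts against the same
    # budget, which is exactly what the patched threaded_requests() does.
    start = time.time()
    for th in threads:
        remaining_time = max(0.0, timeout_limit - (time.time() - start))
        th.join(remaining_time)
        if th.is_alive():
            print('engine timeout')


# Two slow "engines" sharing a 0.1 second budget: wait_with_deadline()
# returns after roughly 0.1 s and reports both of them as timed out.
workers = [threading.Thread(target=time.sleep, args=(1,)) for _ in range(2)]
for w in workers:
    w.start()
wait_with_deadline(workers, 0.1)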
@@ -56,7 +64,7 @@ def default_request_params():
 
 # create a callback wrapper for the search engine results
 def make_callback(engine_name,
-                  results,
+                  results_queue,
                   suggestions,
                   answers,
                   infoboxes,
@@ -74,7 +82,7 @@ def make_callback(engine_name,
         except Exception, e:
             # increase errors stats
             engines[engine_name].stats['errors'] += 1
-            results[engine_name] = cb_res
+            results_queue.put_nowait((engine_name, cb_res))
 
             # print engine name and specific error message
             print '[E] Error with engine "{0}":\n\t{1}'.format(
@@ -104,7 +112,7 @@ def make_callback(engine_name,
             # append result
             cb_res.append(result)
 
-        results[engine_name] = cb_res
+        results_queue.put_nowait((engine_name, cb_res))
 
         # update stats with current page-load-time
         engines[engine_name].stats['page_load_time'] += \
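Note: this and the previous hunk switch make_callback() from writing into a shared results dict to reporting through a single thread-safe queue. Below is a rough, simplified stand-in (illustrative names, not the commit's code, written against Python 3's queue module) for the shape of that callback: put_nowait() on an unbounded Queue is thread-safe and never blocks, so each engine's worker thread can report its (engine_name, results) tuple without any extra locking.

from queue import Queue  # the commit's Python 2 code imports Queue from 'Queue'

results_queue = Queue()


def make_results_callback(engine_name, results_queue):
    # Simplified stand-in for searx's make_callback: the returned wrapper runs
    # in the engine's worker thread, collects that engine's results and then
    # hands them to the queue as a single (engine_name, results) tuple.
    def process_callback(response):
        cb_res = []
        for result in response:  # pretend the response is already parsed results
            cb_res.append(result)
        results_queue.put_nowait((engine_name, cb_res))
    return process_callback


callback = make_results_callback('engine_a', results_queue)
callback(['first hit', 'second hit'])
print(results_queue.get_nowait())  # ('engine_a', ['first hit', 'second hit'])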
@@ -420,7 +428,7 @@ class Search(object):
 
         # init vars
         requests = []
-        results = {}
+        results_queue = Queue()
         suggestions = set()
         answers = set()
         infoboxes = []
@@ -468,7 +476,7 @@ class Search(object):
             # create a callback wrapper for the search engine results
             callback = make_callback(
                 selected_engine['name'],
-                results,
+                results_queue,
                 suggestions,
                 answers,
                 infoboxes,
@@ -502,6 +510,11 @@ class Search(object):
         # send all search-request
         threaded_requests(requests)
 
+        results = {}
+        while not results_queue.empty():
+            engine_name, engine_results = results_queue.get_nowait()
+            results[engine_name] = engine_results
+
         # update engine-specific stats
         for engine_name, engine_results in results.items():
             engines[engine_name].stats['search_count'] += 1
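Note: once threaded_requests() returns, every engine thread has either finished or been given up on as timed out, so the main thread can drain the queue with empty()/get_nowait() without blocking, as the final hunk does; results that a timed-out engine delivers after the drain are simply never read. A small self-contained sketch of that gather step (illustrative engine names, Python 3 queue module):

import threading
from queue import Queue  # Queue.Queue in the Python 2 code being patched

results_queue = Queue()

# Two pretend engine threads each report one (engine_name, results) tuple.
threads = [
    threading.Thread(target=results_queue.put_nowait, args=((name, [name + ' hit']),))
    for name in ('engine_a', 'engine_b')
]
for th in threads:
    th.start()
for th in threads:
    th.join()

# Main thread: drain whatever arrived in time into a plain dict, then use it
# for per-engine bookkeeping the way Search.search() updates its stats.
results = {}
while not results_queue.empty():
    engine_name, engine_results = results_queue.get_nowait()
    results[engine_name] = engine_results

print(sorted(results))  # ['engine_a', 'engine_b']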