forked from Ponysearch/Ponysearch
Merge pull request #551 from dalf/fix-results-lock
[fix] searx.results: fix pylint issue "useless-with-lock"
commit 27c1b9f660
1 changed file with 16 additions and 13 deletions
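This merge replaces the "with RLock():" pattern in searx.results.ResultContainer with a single lock stored on the instance. pylint flags "with RLock():" as useless-with-lock because the statement creates a fresh lock, acquires it, and discards it, so no other thread ever contends for it and nothing is actually serialized. The hunks below add a shared self._lock and use it at every merge site; lines whose only change is deeper indentation under the new with-blocks appear as unchanged context here, since whitespace-only changes seem to be hidden in this rendering. A minimal sketch of the before/after pattern (illustrative only, hypothetical class names, not code from the commit):

import threading

class BrokenCounter:
    # Anti-pattern flagged by pylint: a brand-new lock is created, acquired
    # and thrown away on every call, so it never excludes another thread.
    def __init__(self):
        self.value = 0

    def add(self, n):
        with threading.RLock():
            self.value += n

class FixedCounter:
    # The pattern this commit adopts: one lock per instance, shared by all
    # callers, so the critical section really is serialized.
    def __init__(self):
        self.value = 0
        self._lock = threading.RLock()

    def add(self, n):
        with self._lock:
            self.value += n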
@@ -145,7 +145,8 @@ class ResultContainer:
     """docstring for ResultContainer"""
 
     __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
-                '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
+                '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result',\
+                '_lock'
 
     def __init__(self):
         super().__init__()
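One detail worth noting in the hunk above: ResultContainer declares __slots__, so an instance can only hold attributes listed there, and '_lock' has to be added to the tuple before __init__ may assign self._lock. A small illustration of what happens otherwise (hypothetical class names, not from the source):

import threading

class MissingSlot:
    __slots__ = ('value',)              # '_lock' not declared

    def __init__(self):
        self.value = 0
        self._lock = threading.RLock()  # AttributeError: no slot for '_lock'

class DeclaredSlot:
    __slots__ = ('value', '_lock')      # slot declared, assignment works

    def __init__(self):
        self.value = 0
        self._lock = threading.RLock()

DeclaredSlot()                          # fine
try:
    MissingSlot()
except AttributeError as exc:
    print('expected:', exc)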
@@ -162,6 +163,7 @@ class ResultContainer:
         self.timings = []
         self.redirect_url = None
         self.on_result = lambda _: True
+        self._lock = RLock()
 
     def extend(self, engine_name, results):
         if self._closed:
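The new attribute keeps using RLock, which was evidently already imported for the old "with RLock():" pattern (the diff touches no import lines). An RLock is reentrant: the thread that currently holds it may acquire it again without blocking, which is the forgiving choice if a locked method ever calls another locked method on the same container. A self-contained sketch of that property (not code from the commit):

import threading

lock = threading.RLock()

def outer():
    with lock:       # first acquisition
        inner()

def inner():
    with lock:       # same thread re-acquires the same RLock: no deadlock
        print('both frames hold the lock')

outer()              # prints; with a plain threading.Lock() this would deadlock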
@@ -216,6 +218,7 @@ class ResultContainer:
         infobox['engines'] = set([infobox['engine']])
         if infobox_id is not None:
             parsed_url_infobox_id = urlparse(infobox_id)
+            with self._lock:
                 for existingIndex in self.infoboxes:
                     if compare_urls(urlparse(existingIndex.get('id', '')), parsed_url_infobox_id):
                         merge_two_infoboxes(existingIndex, infobox)
@@ -262,6 +265,7 @@ class ResultContainer:
 
     def __merge_url_result(self, result, position):
         result['engines'] = set([result['engine']])
+        with self._lock:
             duplicated = self.__find_duplicated_http_result(result)
             if duplicated:
                 self.__merge_duplicated_http_result(duplicated, result, position)
@@ -269,7 +273,6 @@ class ResultContainer:
 
             # if there is no duplicate found, append result
             result['positions'] = [position]
-        with RLock():
             self._merged_results.append(result)
 
     def __find_duplicated_http_result(self, result):
@@ -314,7 +317,7 @@ class ResultContainer:
     def __merge_result_no_url(self, result, position):
         result['engines'] = set([result['engine']])
         result['positions'] = [position]
-        with RLock():
+        with self._lock:
             self._merged_results.append(result)
 
     def close(self):