forked from Ponysearch/Ponysearch
commit 04447f8c1a
5 changed files with 36 additions and 53 deletions
@@ -125,12 +125,12 @@ class RawTextQuery:
             searx_query_part = True
             engine_name = engine_shortcuts[prefix]
             if engine_name in engines:
-                self.enginerefs.append(EngineRef(engine_name, 'none', True))
+                self.enginerefs.append(EngineRef(engine_name, 'none'))

         # check if prefix is equal with engine name
         elif prefix in engines:
             searx_query_part = True
-            self.enginerefs.append(EngineRef(prefix, 'none', True))
+            self.enginerefs.append(EngineRef(prefix, 'none'))

         # check if prefix is equal with categorie name
         elif prefix in categories:
@@ -52,39 +52,36 @@ def initialize(settings_engines=None):

 class EngineRef:

-    __slots__ = 'name', 'category', 'from_bang'
+    __slots__ = 'name', 'category'

-    def __init__(self, name: str, category: str, from_bang: bool=False):
+    def __init__(self, name: str, category: str):
         self.name = name
         self.category = category
-        self.from_bang = from_bang

     def __repr__(self):
-        return "EngineRef({!r}, {!r}, {!r})".format(self.name, self.category, self.from_bang)
+        return "EngineRef({!r}, {!r})".format(self.name, self.category)

     def __eq__(self, other):
-        return self.name == other.name and self.category == other.category and self.from_bang == other.from_bang
+        return self.name == other.name and self.category == other.category


 class SearchQuery:
     """container for all the search parameters (query, language, etc...)"""

-    __slots__ = 'query', 'engineref_list', 'categories', 'lang', 'safesearch', 'pageno', 'time_range',\
+    __slots__ = 'query', 'engineref_list', 'lang', 'safesearch', 'pageno', 'time_range',\
                 'timeout_limit', 'external_bang'

     def __init__(self,
                  query: str,
                  engineref_list: typing.List[EngineRef],
-                 categories: typing.List[str],
-                 lang: str,
-                 safesearch: int,
-                 pageno: int,
-                 time_range: typing.Optional[str],
+                 lang: str='all',
+                 safesearch: int=0,
+                 pageno: int=1,
+                 time_range: typing.Optional[str]=None,
                  timeout_limit: typing.Optional[float]=None,
                  external_bang: typing.Optional[str]=None):
         self.query = query
         self.engineref_list = engineref_list
-        self.categories = categories
         self.lang = lang
         self.safesearch = safesearch
         self.pageno = pageno
@@ -92,15 +89,18 @@ class SearchQuery:
         self.timeout_limit = timeout_limit
         self.external_bang = external_bang

+    @property
+    def categories(self):
+        return list(set(map(lambda engineref: engineref.category, self.engineref_list)))
+
     def __repr__(self):
-        return "SearchQuery({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\
-            format(self.query, self.engineref_list, self.categories, self.lang, self.safesearch,
+        return "SearchQuery({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\
+            format(self.query, self.engineref_list, self.lang, self.safesearch,
                    self.pageno, self.time_range, self.timeout_limit, self.external_bang)

     def __eq__(self, other):
         return self.query == other.query\
             and self.engineref_list == other.engineref_list\
-            and self.categories == self.categories\
             and self.lang == other.lang\
             and self.safesearch == other.safesearch\
             and self.pageno == other.pageno\
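
For orientation, a minimal usage sketch of the refactored classes (illustration only, not part of the commit; it assumes EngineRef and SearchQuery are importable from searx.search as in upstream searx, which may differ in this fork):

    # categories is no longer a constructor argument; it is derived from the
    # EngineRef list via the new read-only property shown above.
    from searx.search import EngineRef, SearchQuery

    refs = [EngineRef('bing', 'general'), EngineRef('duckduckgo', 'general')]
    sq = SearchQuery('test', refs, lang='en-US', safesearch=0, pageno=1)

    print(sq.categories)  # ['general'] -- deduplicated from the refs
    print(repr(sq))       # repr/eq no longer carry a separate categories field
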
@@ -109,18 +109,6 @@ def parse_timeout(form: Dict[str, str], raw_text_query: RawTextQuery) -> Optiona
         raise SearxParameterException('timeout_limit', timeout_limit)


-def parse_specific(raw_text_query: RawTextQuery) -> Tuple[List[EngineRef], List[str]]:
-    query_engineref_list = raw_text_query.enginerefs
-    additional_categories = set()
-    for engineref in raw_text_query.enginerefs:
-        if engineref.from_bang:
-            additional_categories.add('none')
-        else:
-            additional_categories.add(engineref.category)
-    query_categories = list(additional_categories)
-    return query_engineref_list, query_categories
-
-
 def parse_category_form(query_categories: List[str], name: str, value: str) -> None:
     if name == 'categories':
         query_categories.extend(categ for categ in map(str.strip, value.split(',')) if categ in categories)
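
The deleted parse_specific() collected categories from the bang-parsed EngineRefs, special-casing bang engines as 'none'; since the parser now creates those refs with category 'none' directly (first hunk), the same category set falls out of the ref list itself. A self-contained sketch of that derivation (namedtuple stand-in, illustration only):

    from collections import namedtuple

    # Stand-in for EngineRef, just to show the category derivation
    Ref = namedtuple('Ref', ['name', 'category'])

    refs = [Ref('wikipedia', 'none'), Ref('bing', 'general'), Ref('google', 'general')]
    print(list(set(r.category for r in refs)))  # e.g. ['none', 'general'] (set order not guaranteed)
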
@@ -171,8 +159,7 @@ def get_engineref_from_category_list(category_list: List[str], disabled_engines:
     return result


-def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engines: List[str])\
-        -> Tuple[List[EngineRef], List[str]]:
+def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engines: List[str]) -> List[EngineRef]:
     query_engineref_list = []
     query_categories = []

@@ -195,8 +182,6 @@ def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engin
     if query_categories:
         # add engines from referenced by the "categories" parameter and the "category_*"" parameters
         query_engineref_list.extend(get_engineref_from_category_list(query_categories, disabled_engines))
-        # get categories from the query_engineref_list
-        query_categories = list(set(engine.category for engine in query_engineref_list))
     else:
         # no "engines" parameters in the form
         if not query_categories:
@@ -208,7 +193,7 @@ def parse_generic(preferences: Preferences, form: Dict[str, str], disabled_engin
             # declared under the specific categories
             query_engineref_list.extend(get_engineref_from_category_list(query_categories, disabled_engines))

-    return query_engineref_list, query_categories
+    return query_engineref_list


 def get_search_query_from_webapp(preferences: Preferences, form: Dict[str, str])\
@@ -236,20 +221,18 @@ def get_search_query_from_webapp(preferences: Preferences, form: Dict[str, str])
     if not is_locked('categories') and raw_text_query.enginerefs and raw_text_query.specific:
         # if engines are calculated from query,
         # set categories by using that informations
-        query_engineref_list, query_categories = parse_specific(raw_text_query)
+        query_engineref_list = raw_text_query.enginerefs
     else:
         # otherwise, using defined categories to
         # calculate which engines should be used
-        query_engineref_list, query_categories = parse_generic(preferences, form, disabled_engines)
+        query_engineref_list = parse_generic(preferences, form, disabled_engines)

     query_engineref_list = deduplicate_engineref_list(query_engineref_list)
     query_engineref_list, query_engineref_list_unknown, query_engineref_list_notoken =\
         validate_engineref_list(query_engineref_list, preferences)

-    return (SearchQuery(query, query_engineref_list, query_categories,
-                        query_lang, query_safesearch, query_pageno,
-                        query_time_range, query_timeout,
-                        external_bang=external_bang),
+    return (SearchQuery(query, query_engineref_list, query_lang, query_safesearch, query_pageno,
+                        query_time_range, query_timeout, external_bang=external_bang),
             raw_text_query,
             query_engineref_list_unknown,
             query_engineref_list_notoken)
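
Call sites change shape accordingly: with the categories slot removed, the remaining positional arguments of SearchQuery shift one place to the left, which is what the return statement above and the test updates below adapt to. A hedged before/after sketch (same searx.search import assumption as the earlier sketch):

    from searx.search import EngineRef, SearchQuery

    refs = [EngineRef('bing', 'general')]

    # before: SearchQuery(query, engineref_list, categories, lang, safesearch, pageno, ...)
    # old = SearchQuery('test', refs, ['general'], 'en-US', 0, 1, None, None)

    # after: the categories argument is gone, the rest shifts left
    new = SearchQuery('test', refs, 'en-US', 0, 1, None, None, external_bang=None)
    assert new.categories == ['general']  # still available, now derived
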
@@ -23,13 +23,13 @@ TEST_ENGINES = [
 class SearchQueryTestCase(SearxTestCase):

     def test_repr(self):
-        s = SearchQuery('test', [EngineRef('bing', 'general', False)], ['general'], 'all', 0, 1, '1', 5.0, 'g')
+        s = SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, '1', 5.0, 'g')
         self.assertEqual(repr(s),
-                         "SearchQuery('test', [EngineRef('bing', 'general', False)], ['general'], 'all', 0, 1, '1', 5.0, 'g')")  # noqa
+                         "SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, '1', 5.0, 'g')")  # noqa

     def test_eq(self):
-        s = SearchQuery('test', [EngineRef('bing', 'general', False)], ['general'], 'all', 0, 1, None, None, None)
-        t = SearchQuery('test', [EngineRef('google', 'general', False)], ['general'], 'all', 0, 1, None, None, None)
+        s = SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, None, None, None)
+        t = SearchQuery('test', [EngineRef('google', 'general')], 'all', 0, 1, None, None, None)
         self.assertEqual(s, s)
         self.assertNotEqual(s, t)

@@ -43,7 +43,7 @@ class SearchTestCase(SearxTestCase):
     def test_timeout_simple(self):
         searx.search.max_request_timeout = None
         search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, None)
+                                   'en-US', SAFESEARCH, PAGENO, None, None)
         search = searx.search.Search(search_query)
         search.search()
         self.assertEqual(search.actual_timeout, 3.0)
@@ -51,7 +51,7 @@ class SearchTestCase(SearxTestCase):
     def test_timeout_query_above_default_nomax(self):
         searx.search.max_request_timeout = None
         search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, 5.0)
+                                   'en-US', SAFESEARCH, PAGENO, None, 5.0)
         search = searx.search.Search(search_query)
         search.search()
         self.assertEqual(search.actual_timeout, 3.0)
|
@ -59,7 +59,7 @@ class SearchTestCase(SearxTestCase):
|
||||||
def test_timeout_query_below_default_nomax(self):
|
def test_timeout_query_below_default_nomax(self):
|
||||||
searx.search.max_request_timeout = None
|
searx.search.max_request_timeout = None
|
||||||
search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
|
search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
|
||||||
['general'], 'en-US', SAFESEARCH, PAGENO, None, 1.0)
|
'en-US', SAFESEARCH, PAGENO, None, 1.0)
|
||||||
search = searx.search.Search(search_query)
|
search = searx.search.Search(search_query)
|
||||||
search.search()
|
search.search()
|
||||||
self.assertEqual(search.actual_timeout, 1.0)
|
self.assertEqual(search.actual_timeout, 1.0)
|
||||||
|
@@ -67,7 +67,7 @@ class SearchTestCase(SearxTestCase):
     def test_timeout_query_below_max(self):
         searx.search.max_request_timeout = 10.0
         search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, 5.0)
+                                   'en-US', SAFESEARCH, PAGENO, None, 5.0)
         search = searx.search.Search(search_query)
         search.search()
         self.assertEqual(search.actual_timeout, 5.0)
@@ -75,7 +75,7 @@ class SearchTestCase(SearxTestCase):
     def test_timeout_query_above_max(self):
         searx.search.max_request_timeout = 10.0
         search_query = SearchQuery('test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, 15.0)
+                                   'en-US', SAFESEARCH, PAGENO, None, 15.0)
         search = searx.search.Search(search_query)
         search.search()
         self.assertEqual(search.actual_timeout, 10.0)
@@ -83,7 +83,7 @@ class SearchTestCase(SearxTestCase):
     def test_external_bang(self):
         search_query = SearchQuery('yes yes',
                                    [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, None,
+                                   'en-US', SAFESEARCH, PAGENO, None, None,
                                    external_bang="yt")
         search = searx.search.Search(search_query)
         results = search.search()
@@ -92,7 +92,7 @@ class SearchTestCase(SearxTestCase):

         search_query = SearchQuery('youtube never gonna give you up',
                                    [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
-                                   ['general'], 'en-US', SAFESEARCH, PAGENO, None, None)
+                                   'en-US', SAFESEARCH, PAGENO, None, None)

         search = searx.search.Search(search_query)
         results = search.search()
@@ -101,8 +101,8 @@ class StandaloneSearx(SearxTestCase):
         args = sas.parse_argument(['rain', ])
         search_q = sas.get_search_query(args)
         self.assertTrue(search_q)
-        self.assertEqual(search_q, SearchQuery('rain', [EngineRef('engine1', 'general', False)],
-                                               ['general'], 'all', 0, 1, None, None, None))
+        self.assertEqual(search_q, SearchQuery('rain', [EngineRef('engine1', 'general')],
+                                               'all', 0, 1, None, None, None))

     def test_no_parsed_url(self):
         """test no_parsed_url func"""