forked from Ponysearch/Ponysearch
commit 219af243a2
29 changed files with 74 additions and 94 deletions
Makefile (14 changes)
@@ -13,6 +13,8 @@ include utils/makefile.include
 PYOBJECTS = searx
 DOC = docs
 PY_SETUP_EXTRAS ?= [test]
+PYLINT_SEARX_DISABLE_OPTION := I,C,R,W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401
+PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES := supported_languages,language_aliases

 include utils/makefile.python
 include utils/makefile.sphinx
@@ -210,8 +212,8 @@ gecko.driver:
 PHONY += test test.sh test.pylint test.pep8 test.unit test.coverage test.robot
 test: buildenv test.pylint test.pep8 test.unit gecko.driver test.robot

-# TODO: balance linting with pylint

+# TODO: balance linting with pylint
 test.pylint: pyenvinstall
     $(call cmd,pylint,\
         searx/preferences.py \
@@ -219,6 +221,16 @@ test.pylint: pyenvinstall
         searx/engines/gigablast.py \
         searx/engines/deviantart.py \
     )
+    $(call cmd,pylint,\
+        --disable=$(PYLINT_SEARX_DISABLE_OPTION) \
+        --additional-builtins=$(PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES) \
+        searx/engines \
+    )
+    $(call cmd,pylint,\
+        --disable=$(PYLINT_SEARX_DISABLE_OPTION) \
+        --ignore=searx/engines \
+        searx tests \
+    )

 # ignored rules:
 #  E402 module level import not at top of file
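Note: the new pylint pass over searx/engines needs --additional-builtins because searx injects names such as supported_languages and language_aliases into every engine module at load time; without the flag pylint reports them as undefined. A minimal sketch of such an engine module (file name and URL hypothetical):

    # searx/engines/example.py (hypothetical)
    # `supported_languages` is assigned onto this module by the engine
    # loader at runtime, so pylint sees no definition for it here.

    def request(query, params):
        if 'en' in supported_languages:  # injected name, hence --additional-builtins
            params['url'] = 'https://example.org/?q=' + query
        return params
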
@@ -72,7 +72,7 @@ def load_engine(engine_data):

    try:
        engine = load_module(engine_module + '.py', engine_dir)
-    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError) as e:
+    except (SyntaxError, KeyboardInterrupt, SystemExit, SystemError, ImportError, RuntimeError):
        logger.exception('Fatal exception in engine "{}"'.format(engine_module))
        sys.exit(1)
    except:
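Note: dropping the unused `as e` binding is safe because logger.exception() reads the active exception from sys.exc_info() and appends the traceback on its own. A self-contained sketch:

    import logging

    logger = logging.getLogger('engines')

    def load(engine_module):
        try:
            raise ImportError('missing dependency')  # stand-in for load_module()
        except (SyntaxError, ImportError, RuntimeError):
            # the in-flight exception is picked up automatically,
            # so no `as e` binding is needed
            logger.exception('Fatal exception in engine "%s"', engine_module)

    load('example')
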
@@ -234,7 +234,7 @@ def get_engines_stats(preferences):
     results = to_percentage(results, max_results)
     scores = to_percentage(scores, max_score)
     scores_per_result = to_percentage(scores_per_result, max_score_per_result)
-    erros = to_percentage(errors, max_errors)
+    errors = to_percentage(errors, max_errors)

     return [
         (
@@ -41,7 +41,6 @@ def response(resp):
         # defaults
         filesize = 0
         magnet_link = "magnet:?xt=urn:btih:{}&tr=http://tracker.acgsou.com:2710/announce"
-        torrent_link = ""

         try:
             category = extract_text(result.xpath(xpath_category)[0])
@@ -80,10 +80,7 @@ def response(resp):

         date = datetime.now()  # needed in case no dcdate is available for an item
         for item in entry:
-            if item.attrib["name"] == "dchdate":
-                harvestDate = item.text
-
-            elif item.attrib["name"] == "dcdate":
+            if item.attrib["name"] == "dcdate":
                 date = item.text

             elif item.attrib["name"] == "dctitle":
@@ -18,7 +18,7 @@ from json import loads
 from searx.utils import match_language

 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # engine dependent config
 categories = ['images']
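Note: this import (and the matching ones in the engines below) is a deliberate re-export: the names are imported only so they exist as attributes of the engine module, where the language-fetching machinery looks them up. # NOQA silences flake8's unused-import check; the new comment silences pylint's. A generic sketch of the pattern (module names hypothetical):

    # helpers.py
    def fetch_languages():
        return ['en', 'de']

    # engine.py: imported solely so engine.fetch_languages exists
    from helpers import fetch_languages  # noqa: F401  # pylint: disable=unused-import
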
@@ -17,7 +17,7 @@ from urllib.parse import urlencode, urlparse, parse_qsl
 from lxml import etree
 from searx.utils import list_get, match_language
 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # engine dependent config
 categories = ['news']
@@ -16,7 +16,7 @@ from urllib.parse import urlencode
 from searx.utils import match_language

 from searx.engines.bing import language_aliases
-from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.bing import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 categories = ['videos']
 paging = True
@@ -41,8 +41,6 @@ def request(query, params):
     from_currency = name_to_iso4217(from_currency.strip())
     to_currency = name_to_iso4217(to_currency.strip())

-    q = (from_currency + to_currency).upper()
-
     params['url'] = url.format(from_currency, to_currency)
     params['amount'] = amount
     params['from'] = from_currency
@@ -49,11 +49,11 @@ correction_xpath = '//div[@id="did_you_mean"]//a'


 # match query's language to a region code that duckduckgo will accept
-def get_region_code(lang, lang_list=[]):
+def get_region_code(lang, lang_list=None):
     if lang == 'all':
         return None

-    lang_code = match_language(lang, lang_list, language_aliases, 'wt-WT')
+    lang_code = match_language(lang, lang_list or [], language_aliases, 'wt-WT')
     lang_parts = lang_code.split('-')

     # country code goes first
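Note: lang_list=[] is the mutable-default-argument pitfall (pylint W0102): the default list is created once, at function definition, and shared by every call that omits the argument. Defaulting to None and expanding with `lang_list or []` restores per-call semantics. A standalone illustration:

    def remember(item, seen=[]):      # one shared list for every call
        seen.append(item)
        return seen

    def remember_fixed(item, seen=None):
        seen = seen or []             # fresh list whenever none is passed
        seen.append(item)
        return seen

    print(remember(1), remember(2))              # [1, 2] [1, 2]  (state leaks)
    print(remember_fixed(1), remember_fixed(2))  # [1] [2]
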
@@ -16,7 +16,7 @@ from lxml import html
 from searx import logger
 from searx.data import WIKIDATA_UNITS
 from searx.engines.duckduckgo import language_aliases
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from searx.utils import extract_text, html_to_text, match_language, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom

@@ -16,7 +16,7 @@
 from json import loads
 from urllib.parse import urlencode
 from searx.engines.duckduckgo import get_region_code
-from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from searx.poolrequests import get

 # engine dependent config
@@ -8,7 +8,7 @@ search_url = url + '/search.php?{query}&size_i=0&size_f=100000000&engine_r=1&eng
 paging = True


-class FilecropResultParser(HTMLParser):
+class FilecropResultParser(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)

     def __init__(self):
         HTMLParser.__init__(self)
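Note: W0223 ("abstract method not overridden") fires on HTMLParser subclasses because the inherited ParserBase.error() looks abstract to pylint, even though CPython never calls it there (https://bugs.python.org/issue31844), so suppressing the warning is the usual workaround. A minimal sketch:

    from html.parser import HTMLParser

    class TextEcho(HTMLParser):  # pylint: disable=W0223
        # without the comment above, pylint flags this class for not
        # overriding ParserBase.error()
        def handle_data(self, data):
            print(data)

    TextEcho().feed('<p>hello</p>')  # prints: hello
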
@@ -29,10 +29,7 @@ from lxml import html
 from flask_babel import gettext
 from searx import logger
 from searx.utils import extract_text, eval_xpath
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
-
-# pylint: disable=unused-import
-# pylint: enable=unused-import
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 from searx.engines.google import (
     get_lang_country,
@@ -13,7 +13,7 @@
 from urllib.parse import urlencode
 from lxml import html
 from searx.utils import match_language
-from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.google import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 # search-url
 categories = ['news']
@@ -90,6 +90,5 @@ def response(resp):


 def _fetch_supported_languages(resp):
-    ret_val = {}
     peertube_languages = list(loads(resp.text).keys())
     return peertube_languages
@@ -23,7 +23,7 @@ from searx.data import WIKIDATA_UNITS
 from searx.poolrequests import post, get
 from searx.utils import match_language, searx_useragent, get_string_replaces_function
 from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
-from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import

 logger = logger.getChild('wikidata')

@@ -228,7 +228,7 @@ def get_results(attribute_result, attributes, language):
             # Should use normalized value p:P2046/psn:P2046/wikibase:quantityAmount
             area = attribute_result.get('P2046')
             osm_zoom = area_to_osm_zoom(area) if area else 19
-            url = attribute.get_str(attribute_result, language, osm_zoom=osm_zoom)
+            url = attribute.get_geo_url(attribute_result, osm_zoom=osm_zoom)
             if url:
                 infobox_urls.append({'title': attribute.get_label(language),
                                      'url': url,
@@ -546,7 +546,14 @@ class WDGeoAttribute(WDAttribute):
     def get_group_by(self):
         return self.get_select()

-    def get_str(self, result, language, osm_zoom=19):
+    def get_str(self, result, language):
+        latitude = result.get(self.name + 'Lat')
+        longitude = result.get(self.name + 'Long')
+        if latitude and longitude:
+            return latitude + ' ' + longitude
+        return None
+
+    def get_geo_url(self, result, osm_zoom=19):
         latitude = result.get(self.name + 'Lat')
         longitude = result.get(self.name + 'Long')
         if latitude and longitude:
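Note: this change splits a method that served two purposes: get_str() now renders only the plain coordinate pair, while the new get_geo_url() keeps the map-URL rendering, which is why the call site above switches to get_geo_url(..., osm_zoom=...). A toy stand-in showing the shape of the split (the URL format below is an assumption for illustration, not the code above):

    class GeoAttr:
        name = 'location'

        def get_str(self, result, language):  # plain text; `language` kept to mirror the diff
            lat, lon = result.get(self.name + 'Lat'), result.get(self.name + 'Long')
            return lat + ' ' + lon if lat and lon else None

        def get_geo_url(self, result, osm_zoom=19):  # map link; URL scheme hypothetical
            lat, lon = result.get(self.name + 'Lat'), result.get(self.name + 'Long')
            if lat and lon:
                return 'https://www.openstreetmap.org/?lat={}&lon={}&zoom={}'.format(lat, lon, osm_zoom)
            return None

    row = {'locationLat': '48.85', 'locationLong': '2.35'}
    print(GeoAttr().get_str(row, 'en'))  # 48.85 2.35
    print(GeoAttr().get_geo_url(row))    # map link at zoom 19
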
@@ -14,7 +14,7 @@ from datetime import datetime, timedelta
 from urllib.parse import urlencode
 from lxml import html
 from searx.engines.yahoo import parse_url, language_aliases
-from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA
+from searx.engines.yahoo import _fetch_supported_languages, supported_languages_url  # NOQA  # pylint: disable=unused-import
 from dateutil import parser
 from searx.utils import extract_text, extract_url, match_language

@@ -158,8 +158,8 @@ def prepare_package_resources(pkg, name):

 def sha_sum(filename):
     with open(filename, "rb") as f:
-        bytes = f.read()
-        return sha256(bytes).hexdigest()
+        file_content_bytes = f.read()
+        return sha256(file_content_bytes).hexdigest()


 plugins = PluginStore()
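Note: the old name shadowed the bytes builtin, making the type unreachable for the rest of the scope. A small demonstration of what the rename avoids:

    def demo():
        bytes = b'abc'        # shadows the builtin inside demo()
        try:
            bytes(3)          # the name now refers to b'abc', not the type
        except TypeError as exc:
            print(exc)        # 'bytes' object is not callable

    demo()
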
@@ -298,7 +298,7 @@ class ResultContainer:
         gresults = []
         categoryPositions = {}

-        for i, res in enumerate(results):
+        for res in results:
             # FIXME : handle more than one category per engine
             res['category'] = engines[res['engine']].categories[0]

@@ -43,9 +43,8 @@ else:
         logger.info('max_request_timeout={0} second(s)'.format(max_request_timeout))
     else:
         logger.critical('outgoing.max_request_timeout if defined has to be float')
-        from sys import exit
-        exit(1)
-
+        import sys
+        sys.exit(1)


 class EngineRef:
@@ -52,7 +52,7 @@ class HTMLTextExtractorException(Exception):
     pass


-class HTMLTextExtractor(HTMLParser):
+class HTMLTextExtractor(HTMLParser):  # pylint: disable=W0223  # (see https://bugs.python.org/issue31844)

     def __init__(self):
         HTMLParser.__init__(self)
@@ -74,18 +74,18 @@ class HTMLTextExtractor(HTMLParser):
     def is_valid_tag(self):
         return not self.tags or self.tags[-1] not in blocked_tags

-    def handle_data(self, d):
+    def handle_data(self, data):
         if not self.is_valid_tag():
             return
-        self.result.append(d)
+        self.result.append(data)

-    def handle_charref(self, number):
+    def handle_charref(self, name):
         if not self.is_valid_tag():
             return
-        if number[0] in ('x', 'X'):
-            codepoint = int(number[1:], 16)
+        if name[0] in ('x', 'X'):
+            codepoint = int(name[1:], 16)
         else:
-            codepoint = int(number)
+            codepoint = int(name)
         self.result.append(chr(codepoint))

     def handle_entityref(self, name):
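Note: the renames (d to data, number to name) align the overrides with the html.parser.HTMLParser base-class signatures, which is what pylint checks overriding methods against. handle_charref() receives the reference without its '&#'/';' wrapper, which is why the hex/decimal branch works. A runnable check (convert_charrefs must be off for the hook to fire):

    from html.parser import HTMLParser

    class CharrefEcho(HTMLParser):  # pylint: disable=W0223
        def __init__(self):
            # with the default convert_charrefs=True, handle_charref()
            # is never called; the decoded text goes to handle_data()
            super().__init__(convert_charrefs=False)

        def handle_charref(self, name):
            # name is 'x41' for '&#x41;' and '66' for '&#66;'
            codepoint = int(name[1:], 16) if name[0] in ('x', 'X') else int(name)
            print(chr(codepoint))

    CharrefEcho().feed('&#x41;&#66;')  # prints A, then B
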
@@ -380,7 +380,7 @@ def _get_lang_to_lc_dict(lang_list):
     return value


-def _match_language(lang_code, lang_list=[], custom_aliases={}):
+def _match_language(lang_code, lang_list=[], custom_aliases={}):  # pylint: disable=W0102
     """auxiliary function to match lang_code in lang_list"""
     # replace language code with a custom alias if necessary
     if lang_code in custom_aliases:
@@ -403,7 +403,7 @@ def _match_language(lang_code, lang_list=[], custom_aliases={}):
     return _get_lang_to_lc_dict(lang_list).get(lang_code, None)


-def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):
+def match_language(locale_code, lang_list=[], custom_aliases={}, fallback='en-US'):  # pylint: disable=W0102
     """get the language code from lang_list that best matches locale_code"""
     # try to get language from given locale_code
     language = _match_language(locale_code, lang_list, custom_aliases)
@@ -466,7 +466,7 @@ def pre_request():
     else:
         try:
             preferences.parse_dict(request.form)
-        except Exception as e:
+        except Exception:
             logger.exception('invalid settings')
             request.errors.append(gettext('Invalid settings'))

@@ -819,7 +819,6 @@ def preferences():

     # render preferences
     image_proxy = request.preferences.get_value('image_proxy')
-    lang = request.preferences.get_value('language')
     disabled_engines = request.preferences.engines.get_disabled()
     allowed_plugins = request.preferences.plugins.get_enabled()
@@ -1,23 +0,0 @@
-# -*- coding: utf-8 -*-
-
-import os
-import unittest2 as unittest
-from plone.testing import layered
-from robotsuite import RobotTestSuite
-from searx.testing import SEARXROBOTLAYER
-
-
-def test_suite():
-    suite = unittest.TestSuite()
-    current_dir = os.path.abspath(os.path.dirname(__file__))
-    robot_dir = os.path.join(current_dir, 'robot')
-    tests = [
-        os.path.join('robot', f) for f in
-        os.listdir(robot_dir) if f.endswith('.robot') and
-        f.startswith('test_')
-    ]
-    for test in tests:
-        suite.addTests([
-            layered(RobotTestSuite(test), layer=SEARXROBOTLAYER),
-        ])
-    return suite
@@ -31,10 +31,10 @@ class PluginStoreTest(SearxTestCase):
         request = Mock()
         store.call([], 'asdf', request, Mock())

-        self.assertFalse(testplugin.asdf.called)
+        self.assertFalse(testplugin.asdf.called)  # pylint: disable=E1101

         store.call([testplugin], 'asdf', request, Mock())
-        self.assertTrue(testplugin.asdf.called)
+        self.assertTrue(testplugin.asdf.called)  # pylint: disable=E1101


 class SelfIPTest(SearxTestCase):
@@ -5,8 +5,8 @@ from searx.testing import SearxTestCase

 class PluginStub:

-    def __init__(self, id, default_on):
-        self.id = id
+    def __init__(self, plugin_id, default_on):
+        self.id = plugin_id
         self.default_on = default_on


@@ -15,11 +15,11 @@ class TestSettings(SearxTestCase):

     def test_map_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = MapSetting(3, wrong_argument={'0': 0})
+            MapSetting(3, wrong_argument={'0': 0})

     def test_map_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = MapSetting(3, map={'dog': 1, 'bat': 2})
+            MapSetting(3, map={'dog': 1, 'bat': 2})

     def test_map_setting_invalid_choice(self):
         setting = MapSetting(2, map={'dog': 1, 'bat': 2})
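Note: the constructor call raises inside assertRaises, so it never returns a value to bind; dropping `setting =` removes pylint's unused-variable warning without changing the test. A self-contained sketch:

    import unittest

    class SettingsDemoTest(unittest.TestCase):
        def test_invalid_literal(self):
            # no binding needed: int() raises before it could return
            with self.assertRaises(ValueError):
                int('not a number')

    if __name__ == '__main__':
        unittest.main()
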
@@ -36,18 +36,14 @@ class TestSettings(SearxTestCase):
         setting.parse('bat')
         self.assertEqual(setting.get_value(), 2)

-    def test_enum_setting_invalid_initialization(self):
-        with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
-
     # enum settings
     def test_enum_setting_invalid_initialization(self):
         with self.assertRaises(MissingArgumentException):
-            setting = EnumStringSetting('cat', wrong_argument=[0, 1, 2])
+            EnumStringSetting('cat', wrong_argument=[0, 1, 2])

     def test_enum_setting_invalid_default_value(self):
         with self.assertRaises(ValidationException):
-            setting = EnumStringSetting(3, choices=[0, 1, 2])
+            EnumStringSetting(3, choices=[0, 1, 2])

     def test_enum_setting_invalid_choice(self):
         setting = EnumStringSetting(0, choices=[0, 1, 2])
|
||||||
# multiple choice settings
|
# multiple choice settings
|
||||||
def test_multiple_setting_invalid_initialization(self):
|
def test_multiple_setting_invalid_initialization(self):
|
||||||
with self.assertRaises(MissingArgumentException):
|
with self.assertRaises(MissingArgumentException):
|
||||||
setting = MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
|
MultipleChoiceSetting(['2'], wrong_argument=['0', '1', '2'])
|
||||||
|
|
||||||
def test_multiple_setting_invalid_default_value(self):
|
def test_multiple_setting_invalid_default_value(self):
|
||||||
with self.assertRaises(ValidationException):
|
with self.assertRaises(ValidationException):
|
||||||
setting = MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
|
MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
|
||||||
|
|
||||||
def test_multiple_setting_invalid_choice(self):
|
def test_multiple_setting_invalid_choice(self):
|
||||||
setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
|
setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
|
||||||
|
|
|
@@ -145,7 +145,7 @@ class ViewsTestCase(SearxTestCase):
         result = self.app.post('/', data={'q': 'test', 'format': 'rss'})
         self.assertEqual(result.status_code, 308)

-    def test_index_rss(self):
+    def test_search_rss(self):
         result = self.app.post('/search', data={'q': 'test', 'format': 'rss'})

         self.assertIn(