forked from Ponysearch/Ponysearch
commit c6d5605d27
46 changed files with 1393 additions and 381 deletions
@@ -21,7 +21,6 @@ import threading
from os.path import realpath, dirname
from babel.localedata import locale_identifiers
from urllib.parse import urlparse
from flask_babel import gettext
from operator import itemgetter
from searx import settings
from searx import logger
@@ -51,8 +50,6 @@ engine_default_args = {'paging': False,
'shortcut': '-',
'disabled': False,
'enable_http': False,
'suspend_end_time': 0,
'continuous_errors': 0,
'time_range_support': False,
'engine_type': 'online',
'display_error_messages': True,
@@ -138,22 +135,6 @@ def load_engine(engine_data):
setattr(engine, 'fetch_supported_languages',
lambda: engine._fetch_supported_languages(get(engine.supported_languages_url, headers=headers)))

engine.stats = {
'sent_search_count': 0, # sent search
'search_count': 0, # succesful search
'result_count': 0,
'engine_time': 0,
'engine_time_count': 0,
'score_count': 0,
'errors': 0
}

engine_type = getattr(engine, 'engine_type', 'online')

if engine_type != 'offline':
engine.stats['page_load_time'] = 0
engine.stats['page_load_count'] = 0

# tor related settings
if settings['outgoing'].get('using_tor_proxy'):
# use onion url if using tor.
@@ -177,103 +158,6 @@ def load_engine(engine_data):
return engine


def to_percentage(stats, maxvalue):
for engine_stat in stats:
if maxvalue:
engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
else:
engine_stat['percentage'] = 0
return stats


def get_engines_stats(preferences):
# TODO refactor
pageloads = []
engine_times = []
results = []
scores = []
errors = []
scores_per_result = []

max_pageload = max_engine_times = max_results = max_score = max_errors = max_score_per_result = 0 # noqa
for engine in engines.values():
if not preferences.validate_token(engine):
continue

if engine.stats['search_count'] == 0:
continue

results_num = \
engine.stats['result_count'] / float(engine.stats['search_count'])

if engine.stats['engine_time_count'] != 0:
this_engine_time = engine.stats['engine_time'] / float(engine.stats['engine_time_count']) # noqa
else:
this_engine_time = 0

if results_num:
score = engine.stats['score_count'] / float(engine.stats['search_count']) # noqa
score_per_result = score / results_num
else:
score = score_per_result = 0.0

if engine.engine_type != 'offline':
load_times = 0
if engine.stats['page_load_count'] != 0:
load_times = engine.stats['page_load_time'] / float(engine.stats['page_load_count']) # noqa
max_pageload = max(load_times, max_pageload)
pageloads.append({'avg': load_times, 'name': engine.name})

max_engine_times = max(this_engine_time, max_engine_times)
max_results = max(results_num, max_results)
max_score = max(score, max_score)
max_score_per_result = max(score_per_result, max_score_per_result)
max_errors = max(max_errors, engine.stats['errors'])

engine_times.append({'avg': this_engine_time, 'name': engine.name})
results.append({'avg': results_num, 'name': engine.name})
scores.append({'avg': score, 'name': engine.name})
errors.append({'avg': engine.stats['errors'], 'name': engine.name})
scores_per_result.append({
'avg': score_per_result,
'name': engine.name
})

pageloads = to_percentage(pageloads, max_pageload)
engine_times = to_percentage(engine_times, max_engine_times)
results = to_percentage(results, max_results)
scores = to_percentage(scores, max_score)
scores_per_result = to_percentage(scores_per_result, max_score_per_result)
errors = to_percentage(errors, max_errors)

return [
(
gettext('Engine time (sec)'),
sorted(engine_times, key=itemgetter('avg'))
),
(
gettext('Page loads (sec)'),
sorted(pageloads, key=itemgetter('avg'))
),
(
gettext('Number of results'),
sorted(results, key=itemgetter('avg'), reverse=True)
),
(
gettext('Scores'),
sorted(scores, key=itemgetter('avg'), reverse=True)
),
(
gettext('Scores per result'),
sorted(scores_per_result, key=itemgetter('avg'), reverse=True)
),
(
gettext('Errors'),
sorted(errors, key=itemgetter('avg'), reverse=True)
),
]


def load_engines(engine_list):
global engines, engine_shortcuts
engines.clear()
206 searx/metrics/__init__.py Normal file
@@ -0,0 +1,206 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import typing
import math
import contextlib
from timeit import default_timer
from operator import itemgetter

from searx.engines import engines
from .models import HistogramStorage, CounterStorage
from .error_recorder import count_error, count_exception, errors_per_engines

__all__ = ["initialize",
"get_engines_stats", "get_engine_errors",
"histogram", "histogram_observe", "histogram_observe_time",
"counter", "counter_inc", "counter_add",
"count_error", "count_exception"]


ENDPOINTS = {'search'}


histogram_storage: typing.Optional[HistogramStorage] = None
counter_storage: typing.Optional[CounterStorage] = None


@contextlib.contextmanager
def histogram_observe_time(*args):
h = histogram_storage.get(*args)
before = default_timer()
yield before
duration = default_timer() - before
if h:
h.observe(duration)
else:
raise ValueError("histogram " + repr((*args,)) + " doesn't exist")


def histogram_observe(duration, *args):
histogram_storage.get(*args).observe(duration)


def histogram(*args, raise_on_not_found=True):
h = histogram_storage.get(*args)
if raise_on_not_found and h is None:
raise ValueError("histogram " + repr((*args,)) + " doesn't exist")
return h


def counter_inc(*args):
counter_storage.add(1, *args)


def counter_add(value, *args):
counter_storage.add(value, *args)


def counter(*args):
return counter_storage.get(*args)


def initialize(engine_names=None):
"""
Initialize metrics
"""
global counter_storage, histogram_storage

counter_storage = CounterStorage()
histogram_storage = HistogramStorage()

# max_timeout = max of all the engine.timeout
max_timeout = 2
for engine_name in (engine_names or engines):
if engine_name in engines:
max_timeout = max(max_timeout, engines[engine_name].timeout)

# histogram configuration
histogram_width = 0.1
histogram_size = int(1.5 * max_timeout / histogram_width)

# engines
for engine_name in (engine_names or engines):
# search count
counter_storage.configure('engine', engine_name, 'search', 'count', 'sent')
counter_storage.configure('engine', engine_name, 'search', 'count', 'successful')
# global counter of errors
counter_storage.configure('engine', engine_name, 'search', 'count', 'error')
# score of the engine
counter_storage.configure('engine', engine_name, 'score')
# result count per requests
histogram_storage.configure(1, 100, 'engine', engine_name, 'result', 'count')
# time doing HTTP requests
histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'http')
# total time
# .time.request and ...response times may overlap .time.http time.
histogram_storage.configure(histogram_width, histogram_size, 'engine', engine_name, 'time', 'total')


def get_engine_errors(engline_list):
result = {}
engine_names = list(errors_per_engines.keys())
engine_names.sort()
for engine_name in engine_names:
if engine_name not in engline_list:
continue

error_stats = errors_per_engines[engine_name]
sent_search_count = max(counter('engine', engine_name, 'search', 'count', 'sent'), 1)
sorted_context_count_list = sorted(error_stats.items(), key=lambda context_count: context_count[1])
r = []
for context, count in sorted_context_count_list:
percentage = round(20 * count / sent_search_count) * 5
r.append({
'filename': context.filename,
'function': context.function,
'line_no': context.line_no,
'code': context.code,
'exception_classname': context.exception_classname,
'log_message': context.log_message,
'log_parameters': context.log_parameters,
'secondary': context.secondary,
'percentage': percentage,
})
result[engine_name] = sorted(r, reverse=True, key=lambda d: d['percentage'])
return result


def to_percentage(stats, maxvalue):
for engine_stat in stats:
if maxvalue:
engine_stat['percentage'] = int(engine_stat['avg'] / maxvalue * 100)
else:
engine_stat['percentage'] = 0
return stats


def get_engines_stats(engine_list):
global counter_storage, histogram_storage

assert counter_storage is not None
assert histogram_storage is not None

list_time = []
list_time_http = []
list_time_total = []
list_result_count = []
list_error_count = []
list_scores = []
list_scores_per_result = []

max_error_count = max_http_time = max_time_total = max_result_count = max_score = None # noqa
for engine_name in engine_list:
error_count = counter('engine', engine_name, 'search', 'count', 'error')

if counter('engine', engine_name, 'search', 'count', 'sent') > 0:
list_error_count.append({'avg': error_count, 'name': engine_name})
max_error_count = max(error_count, max_error_count or 0)

successful_count = counter('engine', engine_name, 'search', 'count', 'successful')
if successful_count == 0:
continue

result_count_sum = histogram('engine', engine_name, 'result', 'count').sum
time_total = histogram('engine', engine_name, 'time', 'total').percentage(50)
time_http = histogram('engine', engine_name, 'time', 'http').percentage(50)
result_count = result_count_sum / float(successful_count)

if result_count:
score = counter('engine', engine_name, 'score') # noqa
score_per_result = score / float(result_count_sum)
else:
score = score_per_result = 0.0

max_time_total = max(time_total, max_time_total or 0)
max_http_time = max(time_http, max_http_time or 0)
max_result_count = max(result_count, max_result_count or 0)
max_score = max(score, max_score or 0)

list_time.append({'total': round(time_total, 1),
'http': round(time_http, 1),
'name': engine_name,
'processing': round(time_total - time_http, 1)})
list_time_total.append({'avg': time_total, 'name': engine_name})
list_time_http.append({'avg': time_http, 'name': engine_name})
list_result_count.append({'avg': result_count, 'name': engine_name})
list_scores.append({'avg': score, 'name': engine_name})
list_scores_per_result.append({'avg': score_per_result, 'name': engine_name})

list_time = sorted(list_time, key=itemgetter('total'))
list_time_total = sorted(to_percentage(list_time_total, max_time_total), key=itemgetter('avg'))
list_time_http = sorted(to_percentage(list_time_http, max_http_time), key=itemgetter('avg'))
list_result_count = sorted(to_percentage(list_result_count, max_result_count), key=itemgetter('avg'), reverse=True)
list_scores = sorted(list_scores, key=itemgetter('avg'), reverse=True)
list_scores_per_result = sorted(list_scores_per_result, key=itemgetter('avg'), reverse=True)
list_error_count = sorted(to_percentage(list_error_count, max_error_count), key=itemgetter('avg'), reverse=True)

return {
'time': list_time,
'max_time': math.ceil(max_time_total or 0),
'time_total': list_time_total,
'time_http': list_time_http,
'result_count': list_result_count,
'scores': list_scores,
'scores_per_result': list_scores_per_result,
'error_count': list_error_count,
}
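The new module above is self-contained enough to try in isolation. The following sketch is illustrative only, not part of the commit; it assumes a working searx checkout and uses a made-up engine name 'wikipedia':

# Illustrative sketch, not part of the diff.
from searx import metrics

metrics.initialize(['wikipedia'])   # configures counters/histograms for that name

metrics.counter_inc('engine', 'wikipedia', 'search', 'count', 'sent')
with metrics.histogram_observe_time('engine', 'wikipedia', 'time', 'total'):
    pass  # the engine request and result parsing would run here
metrics.counter_inc('engine', 'wikipedia', 'search', 'count', 'successful')

print(metrics.counter('engine', 'wikipedia', 'search', 'count', 'sent'))   # 1
print(metrics.histogram('engine', 'wikipedia', 'time', 'total').count)     # 1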
@@ -1,6 +1,5 @@
import typing
import inspect
import logging
from json import JSONDecodeError
from urllib.parse import urlparse
from httpx import HTTPError, HTTPStatusError
@@ -9,16 +8,15 @@ from searx.exceptions import (SearxXPathSyntaxException, SearxEngineXPathExcepti
from searx import logger


logging.basicConfig(level=logging.INFO)

errors_per_engines = {}


class ErrorContext:

__slots__ = 'filename', 'function', 'line_no', 'code', 'exception_classname', 'log_message', 'log_parameters'
__slots__ = ('filename', 'function', 'line_no', 'code', 'exception_classname',
'log_message', 'log_parameters', 'secondary')

def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters):
def __init__(self, filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary):
self.filename = filename
self.function = function
self.line_no = line_no
@@ -26,22 +24,24 @@ class ErrorContext:
self.exception_classname = exception_classname
self.log_message = log_message
self.log_parameters = log_parameters
self.secondary = secondary

def __eq__(self, o) -> bool:
if not isinstance(o, ErrorContext):
return False
return self.filename == o.filename and self.function == o.function and self.line_no == o.line_no\
and self.code == o.code and self.exception_classname == o.exception_classname\
and self.log_message == o.log_message and self.log_parameters == o.log_parameters
and self.log_message == o.log_message and self.log_parameters == o.log_parameters \
and self.secondary == o.secondary

def __hash__(self):
return hash((self.filename, self.function, self.line_no, self.code, self.exception_classname, self.log_message,
self.log_parameters))
self.log_parameters, self.secondary))

def __repr__(self):
return "ErrorContext({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".\
return "ErrorContext({!r}, {!r}, {!r}, {!r}, {!r}, {!r}) {!r}".\
format(self.filename, self.line_no, self.code, self.exception_classname, self.log_message,
self.log_parameters)
self.log_parameters, self.secondary)


def add_error_context(engine_name: str, error_context: ErrorContext) -> None:
@@ -114,31 +114,32 @@ def get_exception_classname(exc: Exception) -> str:
return exc_module + '.' + exc_name


def get_error_context(framerecords, exception_classname, log_message, log_parameters) -> ErrorContext:
def get_error_context(framerecords, exception_classname, log_message, log_parameters, secondary) -> ErrorContext:
searx_frame = get_trace(framerecords)
filename = searx_frame.filename
function = searx_frame.function
line_no = searx_frame.lineno
code = searx_frame.code_context[0].strip()
del framerecords
return ErrorContext(filename, function, line_no, code, exception_classname, log_message, log_parameters)
return ErrorContext(filename, function, line_no, code, exception_classname, log_message, log_parameters, secondary)


def record_exception(engine_name: str, exc: Exception) -> None:
def count_exception(engine_name: str, exc: Exception, secondary: bool = False) -> None:
framerecords = inspect.trace()
try:
exception_classname = get_exception_classname(exc)
log_parameters = get_messages(exc, framerecords[-1][1])
error_context = get_error_context(framerecords, exception_classname, None, log_parameters)
error_context = get_error_context(framerecords, exception_classname, None, log_parameters, secondary)
add_error_context(engine_name, error_context)
finally:
del framerecords


def record_error(engine_name: str, log_message: str, log_parameters: typing.Optional[typing.Tuple] = None) -> None:
def count_error(engine_name: str, log_message: str, log_parameters: typing.Optional[typing.Tuple] = None,
secondary: bool = False) -> None:
framerecords = list(reversed(inspect.stack()[1:]))
try:
error_context = get_error_context(framerecords, None, log_message, log_parameters or ())
error_context = get_error_context(framerecords, None, log_message, log_parameters or (), secondary)
add_error_context(engine_name, error_context)
finally:
del framerecords
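The new `secondary` flag threaded through this hunk also becomes part of ErrorContext equality and hashing, so the same error site is counted separately depending on how it was reported. A small illustrative snippet, not from the commit and with made-up values:

# Illustrative only: contexts that differ solely in 'secondary' are distinct dict keys.
from searx.metrics.error_recorder import ErrorContext

a = ErrorContext('engines/example.py', 'response', 42, 'raise ValueError()',
                 'ValueError', None, (), False)
b = ErrorContext('engines/example.py', 'response', 42, 'raise ValueError()',
                 'ValueError', None, (), True)

counts = {}
for ctx in (a, b, a):
    counts[ctx] = counts.get(ctx, 0) + 1
print(counts[a], counts[b])  # 2 1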
156 searx/metrics/models.py Normal file
@@ -0,0 +1,156 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import decimal
import threading

from searx import logger


__all__ = ["Histogram", "HistogramStorage", "CounterStorage"]

logger = logger.getChild('searx.metrics')


class Histogram:

__slots__ = '_lock', '_size', '_sum', '_quartiles', '_count', '_width'

def __init__(self, width=10, size=200):
self._lock = threading.Lock()
self._width = width
self._size = size
self._quartiles = [0] * size
self._count = 0
self._sum = 0

def observe(self, value):
q = int(value / self._width)
if q < 0:
"""Value below zero is ignored"""
q = 0
if q >= self._size:
"""Value above the maximum is replaced by the maximum"""
q = self._size - 1
with self._lock:
self._quartiles[q] += 1
self._count += 1
self._sum += value

@property
def quartiles(self):
return list(self._quartiles)

@property
def count(self):
return self._count

@property
def sum(self):
return self._sum

@property
def average(self):
with self._lock:
if self._count != 0:
return self._sum / self._count
else:
return 0

@property
def quartile_percentage(self):
''' Quartile in percentage '''
with self._lock:
if self._count > 0:
return [int(q * 100 / self._count) for q in self._quartiles]
else:
return self._quartiles

@property
def quartile_percentage_map(self):
result = {}
# use Decimal to avoid rounding errors
x = decimal.Decimal(0)
width = decimal.Decimal(self._width)
width_exponent = -width.as_tuple().exponent
with self._lock:
if self._count > 0:
for y in self._quartiles:
yp = int(y * 100 / self._count)
if yp != 0:
result[round(float(x), width_exponent)] = yp
x += width
return result

def percentage(self, percentage):
# use Decimal to avoid rounding errors
x = decimal.Decimal(0)
width = decimal.Decimal(self._width)
stop_at_value = decimal.Decimal(self._count) / 100 * percentage
sum_value = 0
with self._lock:
if self._count > 0:
for y in self._quartiles:
sum_value += y
if sum_value >= stop_at_value:
return x
x += width
return None

def __repr__(self):
return "Histogram<avg: " + str(self.average) + ", count: " + str(self._count) + ">"


class HistogramStorage:

__slots__ = 'measures'

def __init__(self):
self.clear()

def clear(self):
self.measures = {}

def configure(self, width, size, *args):
measure = Histogram(width, size)
self.measures[args] = measure
return measure

def get(self, *args):
return self.measures.get(args, None)

def dump(self):
logger.debug("Histograms:")
ks = sorted(self.measures.keys(), key='/'.join)
for k in ks:
logger.debug("- %-60s %s", '|'.join(k), self.measures[k])


class CounterStorage:

__slots__ = 'counters', 'lock'

def __init__(self):
self.lock = threading.Lock()
self.clear()

def clear(self):
with self.lock:
self.counters = {}

def configure(self, *args):
with self.lock:
self.counters[args] = 0

def get(self, *args):
return self.counters[args]

def add(self, value, *args):
with self.lock:
self.counters[args] += value

def dump(self):
with self.lock:
ks = sorted(self.counters.keys(), key='/'.join)
logger.debug("Counters:")
for k in ks:
logger.debug("- %-60s %s", '|'.join(k), self.counters[k])
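A small, self-contained illustration of the two storage classes above; it is not part of the diff and the key names are made up. Note that percentage(p) walks the buckets and returns the lower edge of the bucket in which the p-th percentile falls:

# Illustrative sketch, not part of the commit.
from searx.metrics.models import Histogram, CounterStorage

h = Histogram(width=0.1, size=30)          # 30 buckets of 0.1 each
for value in (0.25, 0.30, 0.45, 1.20):
    h.observe(value)
print(h.count, round(h.average, 2))         # 4 0.55
print(h.percentage(50))                     # roughly 0.2: lower edge of the median's bucket

counters = CounterStorage()
counters.configure('engine', 'example', 'score')
counters.add(2.5, 'engine', 'example', 'score')
print(counters.get('engine', 'example', 'score'))   # 2.5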
@@ -3,7 +3,7 @@
import asyncio
import threading
import concurrent.futures
from time import time
from timeit import default_timer

import httpx
import h2.exceptions
@@ -65,7 +65,7 @@ def get_context_network():

def request(method, url, **kwargs):
"""same as requests/requests/api.py request(...)"""
time_before_request = time()
time_before_request = default_timer()

# timeout (httpx)
if 'timeout' in kwargs:
@@ -82,7 +82,7 @@ def request(method, url, **kwargs):
timeout += 0.2 # overhead
start_time = getattr(THREADLOCAL, 'start_time', time_before_request)
if start_time:
timeout -= time() - start_time
timeout -= default_timer() - start_time

# raise_for_error
check_for_httperror = True
@@ -111,7 +111,7 @@ def request(method, url, **kwargs):
# update total_time.
# See get_time_for_thread() and reset_time_for_thread()
if hasattr(THREADLOCAL, 'total_time'):
time_after_request = time()
time_after_request = default_timer()
THREADLOCAL.total_time += time_after_request - time_before_request

# raise an exception
@@ -199,7 +199,7 @@ class Network:

def get_network(name=None):
global NETWORKS
return NETWORKS[name or DEFAULT_NAME]
return NETWORKS.get(name or DEFAULT_NAME)


def initialize(settings_engines=None, settings_outgoing=None):
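The recurring time() to default_timer() change in this and the following hunks is about measuring durations: timeit.default_timer is a monotonic, high-resolution clock, whereas time.time() reports wall-clock time and can jump when the system clock is adjusted. A minimal illustration, not from the diff:

# Illustrative only: measuring an elapsed duration with default_timer.
from time import sleep
from timeit import default_timer

start = default_timer()
sleep(0.05)
print(f"elapsed: {default_timer() - start:.3f}s")   # roughly 0.05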
2 searx/raise_for_httperror/__init__.py Normal file
@@ -0,0 +1,2 @@
# compatibility with searx/searx
from searx.network import raise_for_httperror
@@ -5,7 +5,7 @@ from threading import RLock
from urllib.parse import urlparse, unquote
from searx import logger
from searx.engines import engines
from searx.metrology.error_recorder import record_error
from searx.metrics import histogram_observe, counter_add, count_error


CONTENT_LEN_IGNORED_CHARS_REGEX = re.compile(r'[,;:!?\./\\\\ ()-_]', re.M | re.U)
@@ -196,12 +196,10 @@ class ResultContainer:

if len(error_msgs) > 0:
for msg in error_msgs:
record_error(engine_name, 'some results are invalids: ' + msg)
count_error(engine_name, 'some results are invalids: ' + msg, secondary=True)

if engine_name in engines:
with RLock():
engines[engine_name].stats['search_count'] += 1
engines[engine_name].stats['result_count'] += standard_result_count
histogram_observe(standard_result_count, 'engine', engine_name, 'result', 'count')

if not self.paging and standard_result_count > 0 and engine_name in engines\
and engines[engine_name].paging:
@@ -301,9 +299,8 @@ class ResultContainer:
for result in self._merged_results:
score = result_score(result)
result['score'] = score
with RLock():
for result_engine in result['engines']:
engines[result_engine].stats['score_count'] += score
for result_engine in result['engines']:
counter_add(score, 'engine', result_engine, 'score')

results = sorted(self._merged_results, key=itemgetter('score'), reverse=True)

@@ -369,9 +366,9 @@ class ResultContainer:
return 0
return resultnum_sum / len(self._number_of_results)

def add_unresponsive_engine(self, engine_name, error_type, error_message=None):
def add_unresponsive_engine(self, engine_name, error_type, error_message=None, suspended=False):
if engines[engine_name].display_error_messages:
self.unresponsive_engines.add((engine_name, error_type, error_message))
self.unresponsive_engines.add((engine_name, error_type, error_message, suspended))

def add_timing(self, engine_name, engine_time, page_load_time):
self.timings.append({
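With this hunk the per-engine stats dictionary and its RLock disappear from result handling; scoring now goes through the thread-safe CounterStorage shown earlier. An illustrative sketch of that accumulation, not part of the commit and using a made-up engine name:

# Illustrative only.
from searx import metrics

metrics.initialize(['example'])
for score in (1.0, 0.5, 2.0):
    metrics.counter_add(score, 'engine', 'example', 'score')
print(metrics.counter('engine', 'example', 'score'))   # 3.5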
@@ -18,7 +18,7 @@ along with searx. If not, see < http://www.gnu.org/licenses/ >.
import typing
import gc
import threading
from time import time
from timeit import default_timer
from uuid import uuid4
from _thread import start_new_thread

@@ -31,6 +31,7 @@ from searx.plugins import plugins
from searx.search.models import EngineRef, SearchQuery
from searx.search.processors import processors, initialize as initialize_processors
from searx.search.checker import initialize as initialize_checker
from searx.metrics import initialize as initialize_metrics, counter_inc, histogram_observe_time


logger = logger.getChild('search')
@@ -50,6 +51,7 @@ else:
def initialize(settings_engines=None, enable_checker=False):
settings_engines = settings_engines or settings['engines']
initialize_processors(settings_engines)
initialize_metrics([engine['name'] for engine in settings_engines])
if enable_checker:
initialize_checker()

@@ -106,13 +108,16 @@ class Search:
for engineref in self.search_query.engineref_list:
processor = processors[engineref.name]

# stop the request now if the engine is suspended
if processor.extend_container_if_suspended(self.result_container):
continue

# set default request parameters
request_params = processor.get_params(self.search_query, engineref.category)
if request_params is None:
continue

with threading.RLock():
processor.engine.stats['sent_search_count'] += 1
counter_inc('engine', engineref.name, 'search', 'count', 'sent')

# append request to list
requests.append((engineref.name, self.search_query.query, request_params))
@@ -157,7 +162,7 @@ class Search:

for th in threading.enumerate():
if th.name == search_id:
remaining_time = max(0.0, self.actual_timeout - (time() - self.start_time))
remaining_time = max(0.0, self.actual_timeout - (default_timer() - self.start_time))
th.join(remaining_time)
if th.is_alive():
th._timeout = True
@@ -180,12 +185,10 @@ class Search:

# do search-request
def search(self):
self.start_time = time()

self.start_time = default_timer()
if not self.search_external_bang():
if not self.search_answerers():
self.search_standard()

return self.result_container
@@ -4,8 +4,8 @@ import typing
import types
import functools
import itertools
import threading
from time import time
from timeit import default_timer
from urllib.parse import urlparse

import re
@@ -17,6 +17,7 @@ from searx import network, logger
from searx.results import ResultContainer
from searx.search.models import SearchQuery, EngineRef
from searx.search.processors import EngineProcessor
from searx.metrics import counter_inc


logger = logger.getChild('searx.search.checker')
@@ -385,9 +386,8 @@ class Checker:
engineref_category = search_query.engineref_list[0].category
params = self.processor.get_params(search_query, engineref_category)
if params is not None:
with threading.RLock():
self.processor.engine.stats['sent_search_count'] += 1
self.processor.search(search_query.query, params, result_container, time(), 5)
counter_inc('engine', search_query.engineref_list[0].name, 'search', 'count', 'sent')
self.processor.search(search_query.query, params, result_container, default_timer(), 5)
return result_container

def get_result_container_tests(self, test_name: str, search_query: SearchQuery) -> ResultContainerTests:
@@ -1,17 +1,110 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import threading
from abc import abstractmethod, ABC
from timeit import default_timer

from searx import logger
from searx.engines import settings
from searx.network import get_time_for_thread, get_network
from searx.metrics import histogram_observe, counter_inc, count_exception, count_error
from searx.exceptions import SearxEngineAccessDeniedException


logger = logger.getChild('searx.search.processor')
SUSPENDED_STATUS = {}


class SuspendedStatus:

__slots__ = 'suspend_end_time', 'suspend_reason', 'continuous_errors', 'lock'

def __init__(self):
self.lock = threading.Lock()
self.continuous_errors = 0
self.suspend_end_time = 0
self.suspend_reason = None

@property
def is_suspended(self):
return self.suspend_end_time >= default_timer()

def suspend(self, suspended_time, suspend_reason):
with self.lock:
# update continuous_errors / suspend_end_time
self.continuous_errors += 1
if suspended_time is None:
suspended_time = min(settings['search']['max_ban_time_on_fail'],
self.continuous_errors * settings['search']['ban_time_on_fail'])
self.suspend_end_time = default_timer() + suspended_time
self.suspend_reason = suspend_reason
logger.debug('Suspend engine for %i seconds', suspended_time)

def resume(self):
with self.lock:
# reset the suspend variables
self.continuous_errors = 0
self.suspend_end_time = 0
self.suspend_reason = None


class EngineProcessor(ABC):

__slots__ = 'engine', 'engine_name', 'lock', 'suspended_status'

def __init__(self, engine, engine_name):
self.engine = engine
self.engine_name = engine_name
key = get_network(self.engine_name)
key = id(key) if key else self.engine_name
self.suspended_status = SUSPENDED_STATUS.setdefault(key, SuspendedStatus())

def handle_exception(self, result_container, reason, exception, suspend=False, display_exception=True):
# update result_container
error_message = str(exception) if display_exception and exception else None
result_container.add_unresponsive_engine(self.engine_name, reason, error_message)
# metrics
counter_inc('engine', self.engine_name, 'search', 'count', 'error')
if exception:
count_exception(self.engine_name, exception)
else:
count_error(self.engine_name, reason)
# suspend the engine ?
if suspend:
suspended_time = None
if isinstance(exception, SearxEngineAccessDeniedException):
suspended_time = exception.suspended_time
self.suspended_status.suspend(suspended_time, reason) # pylint: disable=no-member

def _extend_container_basic(self, result_container, start_time, search_results):
# update result_container
result_container.extend(self.engine_name, search_results)
engine_time = default_timer() - start_time
page_load_time = get_time_for_thread()
result_container.add_timing(self.engine_name, engine_time, page_load_time)
# metrics
counter_inc('engine', self.engine_name, 'search', 'count', 'successful')
histogram_observe(engine_time, 'engine', self.engine_name, 'time', 'total')
if page_load_time is not None:
histogram_observe(page_load_time, 'engine', self.engine_name, 'time', 'http')

def extend_container(self, result_container, start_time, search_results):
if getattr(threading.current_thread(), '_timeout', False):
# the main thread is not waiting anymore
self.handle_exception(result_container, 'Timeout', None)
else:
# check if the engine accepted the request
if search_results is not None:
self._extend_container_basic(result_container, start_time, search_results)
self.suspended_status.resume()

def extend_container_if_suspended(self, result_container):
if self.suspended_status.is_suspended:
result_container.add_unresponsive_engine(self.engine_name,
self.suspended_status.suspend_reason,
suspended=True)
return True
return False

def get_params(self, search_query, engine_category):
# if paging is not supported, skip
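SuspendedStatus centralises the back-off state that the old code kept directly on each engine module (continuous_errors, suspend_end_time). A small illustrative sketch of its behaviour, not part of the diff; the explicit 10-second suspend time avoids relying on any settings values:

# Illustrative sketch, not part of the commit.
from searx.search.processors.abstract import SuspendedStatus

status = SuspendedStatus()
print(status.is_suspended)                  # False

status.suspend(10, 'too many requests')     # back off for 10 seconds
print(status.is_suspended, status.suspend_reason)       # True 'too many requests'

status.resume()
print(status.is_suspended, status.continuous_errors)    # False 0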
@@ -1,51 +1,26 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import threading
from time import time
from searx import logger
from searx.metrology.error_recorder import record_exception, record_error
from searx.search.processors.abstract import EngineProcessor


logger = logger.getChild('search.processor.offline')
logger = logger.getChild('searx.search.processor.offline')


class OfflineProcessor(EngineProcessor):

engine_type = 'offline'

def _record_stats_on_error(self, result_container, start_time):
engine_time = time() - start_time
result_container.add_timing(self.engine_name, engine_time, engine_time)

with threading.RLock():
self.engine.stats['errors'] += 1

def _search_basic(self, query, params):
return self.engine.search(query, params)

def search(self, query, params, result_container, start_time, timeout_limit):
try:
search_results = self._search_basic(query, params)

if search_results:
result_container.extend(self.engine_name, search_results)

engine_time = time() - start_time
result_container.add_timing(self.engine_name, engine_time, engine_time)
with threading.RLock():
self.engine.stats['engine_time'] += engine_time
self.engine.stats['engine_time_count'] += 1

self.extend_container(result_container, start_time, search_results)
except ValueError as e:
record_exception(self.engine_name, e)
self._record_stats_on_error(result_container, start_time)
# do not record the error
logger.exception('engine {0} : invalid input : {1}'.format(self.engine_name, e))
except Exception as e:
record_exception(self.engine_name, e)
self._record_stats_on_error(result_container, start_time)
result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash', str(e))
self.handle_exception(result_container, 'unexpected crash', e)
logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
else:
if getattr(threading.current_thread(), '_timeout', False):
record_error(self.engine_name, 'Timeout')
@@ -1,23 +1,21 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

from time import time
import threading
import asyncio

import httpx

import searx.network
from searx.engines import settings
from searx import logger
from searx.utils import gen_useragent
from searx.exceptions import (SearxEngineAccessDeniedException, SearxEngineCaptchaException,
SearxEngineTooManyRequestsException,)
from searx.metrology.error_recorder import record_exception, record_error
from searx.metrics.error_recorder import count_error

from searx.search.processors.abstract import EngineProcessor


logger = logger.getChild('search.processor.online')
logger = logger.getChild('searx.search.processor.online')


def default_request_params():
@@ -41,11 +39,6 @@ class OnlineProcessor(EngineProcessor):
if params is None:
return None

# skip suspended engines
if self.engine.suspend_end_time >= time():
logger.debug('Engine currently suspended: %s', self.engine_name)
return None

# add default params
params.update(default_request_params())

@@ -97,9 +90,10 @@ class OnlineProcessor(EngineProcessor):
status_code = str(response.status_code or '')
reason = response.reason_phrase or ''
hostname = response.url.host
record_error(self.engine_name,
'{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
(status_code, reason, hostname))
count_error(self.engine_name,
'{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
(status_code, reason, hostname),
secondary=True)

return response

@@ -130,89 +124,38 @@ class OnlineProcessor(EngineProcessor):
# set the network
searx.network.set_context_network_name(self.engine_name)

# suppose everything will be alright
http_exception = False
suspended_time = None

try:
# send requests and parse the results
search_results = self._search_basic(query, params)

# check if the engine accepted the request
if search_results is not None:
# yes, so add results
result_container.extend(self.engine_name, search_results)

# update engine time when there is no exception
engine_time = time() - start_time
page_load_time = searx.network.get_time_for_thread()
result_container.add_timing(self.engine_name, engine_time, page_load_time)
with threading.RLock():
self.engine.stats['engine_time'] += engine_time
self.engine.stats['engine_time_count'] += 1
# update stats with the total HTTP time
self.engine.stats['page_load_time'] += page_load_time
self.engine.stats['page_load_count'] += 1
except Exception as e:
record_exception(self.engine_name, e)

# Timing
engine_time = time() - start_time
page_load_time = searx.network.get_time_for_thread()
result_container.add_timing(self.engine_name, engine_time, page_load_time)

# Record the errors
with threading.RLock():
self.engine.stats['errors'] += 1

if (issubclass(e.__class__, (httpx.TimeoutException, asyncio.TimeoutError))):
result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout')
# requests timeout (connect or read)
logger.error("engine {0} : HTTP requests timeout"
self.extend_container(result_container, start_time, search_results)
except (httpx.TimeoutException, asyncio.TimeoutError) as e:
# requests timeout (connect or read)
self.handle_exception(result_container, 'HTTP timeout', e, suspend=True, display_exception=False)
logger.error("engine {0} : HTTP requests timeout"
"(search duration : {1} s, timeout: {2} s) : {3}"
.format(self.engine_name, time() - start_time,
timeout_limit,
e.__class__.__name__))
except (httpx.HTTPError, httpx.StreamError) as e:
# other requests exception
self.handle_exception(result_container, 'HTTP error', e, suspend=True, display_exception=False)
logger.exception("engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}"
.format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__))
http_exception = True
elif (issubclass(e.__class__, (httpx.HTTPError, httpx.StreamError))):
result_container.add_unresponsive_engine(self.engine_name, 'HTTP error')
# other requests exception
logger.exception("engine {0} : requests exception"
"(search duration : {1} s, timeout: {2} s) : {3}"
.format(self.engine_name, engine_time, timeout_limit, e))
http_exception = True
elif (issubclass(e.__class__, SearxEngineCaptchaException)):
result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required')
logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))
suspended_time = e.suspended_time # pylint: disable=no-member
elif (issubclass(e.__class__, SearxEngineTooManyRequestsException)):
result_container.add_unresponsive_engine(self.engine_name, 'too many requests')
logger.exception('engine {0} : Too many requests'.format(self.engine_name))
suspended_time = e.suspended_time # pylint: disable=no-member
elif (issubclass(e.__class__, SearxEngineAccessDeniedException)):
result_container.add_unresponsive_engine(self.engine_name, 'blocked')
logger.exception('engine {0} : Searx is blocked'.format(self.engine_name))
suspended_time = e.suspended_time # pylint: disable=no-member
else:
result_container.add_unresponsive_engine(self.engine_name, 'unexpected crash')
# others errors
logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))
else:
if getattr(threading.current_thread(), '_timeout', False):
record_error(self.engine_name, 'Timeout')

# suspend the engine if there is an HTTP error
# or suspended_time is defined
with threading.RLock():
if http_exception or suspended_time:
# update continuous_errors / suspend_end_time
self.engine.continuous_errors += 1
if suspended_time is None:
suspended_time = min(settings['search']['max_ban_time_on_fail'],
self.engine.continuous_errors * settings['search']['ban_time_on_fail'])
self.engine.suspend_end_time = time() + suspended_time
else:
# reset the suspend variables
self.engine.continuous_errors = 0
self.engine.suspend_end_time = 0
.format(self.engine_name, time() - start_time,
timeout_limit,
e))
except SearxEngineCaptchaException as e:
self.handle_exception(result_container, 'CAPTCHA required', e, suspend=True, display_exception=False)
logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))
except SearxEngineTooManyRequestsException as e:
self.handle_exception(result_container, 'too many requests', e, suspend=True, display_exception=False)
logger.exception('engine {0} : Too many requests'.format(self.engine_name))
except SearxEngineAccessDeniedException as e:
self.handle_exception(result_container, 'blocked', e, suspend=True, display_exception=False)
logger.exception('engine {0} : Searx is blocked'.format(self.engine_name))
except Exception as e:
self.handle_exception(result_container, 'unexpected crash', e, display_exception=False)
logger.exception('engine {0} : exception : {1}'.format(self.engine_name, e))

def get_default_tests(self):
tests = {}
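The rewrite above replaces one broad except Exception block full of issubclass() checks with one except clause per error family, each delegating the bookkeeping to handle_exception(). The shape of that control flow, reduced to a runnable toy; the helper names below are stand-ins, not searx APIs:

# Toy illustration of the control flow only; not searx code.
def do_request():
    raise TimeoutError("read timed out")        # stand-in for _search_basic()

def handle(reason, exc, suspend=False):
    print(f"{reason}: {exc!r} (suspend={suspend})")  # stand-in for handle_exception()

try:
    results = do_request()
except TimeoutError as e:
    handle('HTTP timeout', e, suspend=True)
except ConnectionError as e:
    handle('HTTP error', e, suspend=True)
except Exception as e:
    handle('unexpected crash', e)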
@@ -923,12 +923,78 @@ input.cursor-text {
padding: 0.5rem 1rem;
margin: 0rem 0 0 2rem;
border: 1px solid #ddd;
box-shadow: 2px 2px 2px 0px rgba(0, 0, 0, 0.1);
background: white;
font-size: 14px;
font-weight: normal;
z-index: 1000000;
}
td:hover .engine-tooltip,
th:hover .engine-tooltip,
.engine-tooltip:hover {
display: inline-block;
}
/* stacked-bar-chart */
.stacked-bar-chart {
margin: 0;
padding: 0 0.125rem 0 3rem;
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: fill;
flex-direction: row;
flex-wrap: nowrap;
flex-grow: 1;
align-items: center;
display: inline-flex;
}
.stacked-bar-chart-value {
width: 3rem;
display: inline-block;
position: absolute;
padding: 0 0.5rem;
text-align: right;
}
.stacked-bar-chart-base {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
}
.stacked-bar-chart-median {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: #000000;
border: 1px solid rgba(0, 0, 0, 0.9);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate80 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border: 1px solid rgba(0, 0, 0, 0.3);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate95 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-bottom: 1px dotted rgba(0, 0, 0, 0.5);
padding: 0;
}
.stacked-bar-chart-rate100 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-left: 1px solid rgba(0, 0, 0, 0.9);
padding: 0.4rem 0;
width: 1px;
}
File diff suppressed because one or more lines are too long
@@ -896,15 +896,81 @@ input.cursor-text {
padding: 0.5rem 1rem;
margin: 0rem 0 0 2rem;
border: 1px solid #ddd;
box-shadow: 2px 2px 2px 0px rgba(0, 0, 0, 0.1);
background: white;
font-size: 14px;
font-weight: normal;
z-index: 1000000;
}
td:hover .engine-tooltip,
th:hover .engine-tooltip,
.engine-tooltip:hover {
display: inline-block;
}
/* stacked-bar-chart */
.stacked-bar-chart {
margin: 0;
padding: 0 0.125rem 0 3rem;
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: fill;
flex-direction: row;
flex-wrap: nowrap;
flex-grow: 1;
align-items: center;
display: inline-flex;
}
.stacked-bar-chart-value {
width: 3rem;
display: inline-block;
position: absolute;
padding: 0 0.5rem;
text-align: right;
}
.stacked-bar-chart-base {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
}
.stacked-bar-chart-median {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: #d5d8d7;
border: 1px solid rgba(213, 216, 215, 0.9);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate80 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border: 1px solid rgba(213, 216, 215, 0.3);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate95 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-bottom: 1px dotted rgba(213, 216, 215, 0.5);
padding: 0;
}
.stacked-bar-chart-rate100 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-left: 1px solid rgba(213, 216, 215, 0.9);
padding: 0.4rem 0;
width: 1px;
}
/*Global*/
body {
background: #1d1f21 none !important;
File diff suppressed because one or more lines are too long
@@ -688,6 +688,71 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
z-index: 1000000;
}
th:hover .engine-tooltip,
td:hover .engine-tooltip,
.engine-tooltip:hover {
display: inline-block;
}
/* stacked-bar-chart */
.stacked-bar-chart {
margin: 0;
padding: 0 0.125rem 0 3rem;
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: fill;
flex-direction: row;
flex-wrap: nowrap;
flex-grow: 1;
align-items: center;
display: inline-flex;
}
.stacked-bar-chart-value {
width: 3rem;
display: inline-block;
position: absolute;
padding: 0 0.5rem;
text-align: right;
}
.stacked-bar-chart-base {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
}
.stacked-bar-chart-median {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: #000000;
border: 1px solid rgba(0, 0, 0, 0.9);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate80 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border: 1px solid rgba(0, 0, 0, 0.3);
padding: 0.3rem 0;
}
.stacked-bar-chart-rate95 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-bottom: 1px dotted rgba(0, 0, 0, 0.5);
padding: 0;
}
.stacked-bar-chart-rate100 {
display: flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
background: transparent;
border-left: 1px solid rgba(0, 0, 0, 0.9);
padding: 0.4rem 0;
width: 1px;
}
File diff suppressed because one or more lines are too long
2 searx/static/themes/oscar/js/searx.min.js vendored
File diff suppressed because one or more lines are too long
@@ -1,4 +1,7 @@
@import "../logicodev/variables.less";

@stacked-bar-chart: rgb(213, 216, 215, 1);

@import "../logicodev/footer.less";
@import "../logicodev/checkbox.less";
@import "../logicodev/onoff.less";
@@ -20,12 +20,72 @@ input.cursor-text {
padding: 0.5rem 1rem;
margin: 0rem 0 0 2rem;
border: 1px solid #ddd;
box-shadow: 2px 2px 2px 0px rgba(0,0,0,0.1);
background: white;
font-size: 14px;
font-weight: normal;
z-index: 1000000;
}

th:hover .engine-tooltip, .engine-tooltip:hover {
td:hover .engine-tooltip, th:hover .engine-tooltip, .engine-tooltip:hover {
display: inline-block;
}
}

/* stacked-bar-chart */
.stacked-bar-chart {
margin: 0;
padding: 0 0.125rem 0 3rem;
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: fill;
flex-direction: row;
flex-wrap: nowrap;
flex-grow: 1;
align-items: center;
display: inline-flex;
}

.stacked-bar-chart-value {
width: 3rem;
display: inline-block;
position: absolute;
padding: 0 0.5rem;
text-align: right;
}

.stacked-bar-chart-base {
display:flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
}

.stacked-bar-chart-median {
.stacked-bar-chart-base();
background: @stacked-bar-chart;
border: 1px solid fade(@stacked-bar-chart, 90%);
padding: 0.3rem 0;
}

.stacked-bar-chart-rate80 {
.stacked-bar-chart-base();
background: transparent;
border: 1px solid fade(@stacked-bar-chart, 30%);
padding: 0.3rem 0;
}

.stacked-bar-chart-rate95 {
.stacked-bar-chart-base();
background: transparent;
border-bottom: 1px dotted fade(@stacked-bar-chart, 50%);
padding: 0;
}

.stacked-bar-chart-rate100 {
.stacked-bar-chart-base();
background: transparent;
border-left: 1px solid fade(@stacked-bar-chart, 90%);
padding: 0.4rem 0;
width: 1px;
}
@@ -14,3 +14,5 @@
@light-green: #01D7D4;
@orange: #FFA92F;
@dark-red: #c9432f;

@stacked-bar-chart: rgb(0, 0, 0);
@@ -1,3 +1,5 @@
@import "variables.less";

@import "footer.less";

@import "checkbox.less";
@@ -14,6 +14,66 @@
z-index: 1000000;
}

th:hover .engine-tooltip, .engine-tooltip:hover {
th:hover .engine-tooltip, td:hover .engine-tooltip, .engine-tooltip:hover {
display: inline-block;
}

/* stacked-bar-chart */
.stacked-bar-chart {
margin: 0;
padding: 0 0.125rem 0 3rem;
width: 100%;
width: -moz-available;
width: -webkit-fill-available;
width: fill;
flex-direction: row;
flex-wrap: nowrap;
flex-grow: 1;
align-items: center;
display: inline-flex;
}

.stacked-bar-chart-value {
width: 3rem;
display: inline-block;
position: absolute;
padding: 0 0.5rem;
text-align: right;
}

.stacked-bar-chart-base {
display:flex;
flex-shrink: 0;
flex-grow: 0;
flex-basis: unset;
}

.stacked-bar-chart-median {
.stacked-bar-chart-base();
background: @stacked-bar-chart;
border: 1px solid fade(@stacked-bar-chart, 90%);
padding: 0.3rem 0;
}

.stacked-bar-chart-rate80 {
.stacked-bar-chart-base();
background: transparent;
border: 1px solid fade(@stacked-bar-chart, 30%);
padding: 0.3rem 0;
}

.stacked-bar-chart-rate95 {
.stacked-bar-chart-base();
background: transparent;
border-bottom: 1px dotted fade(@stacked-bar-chart, 50%);
padding: 0;
}

.stacked-bar-chart-rate100 {
.stacked-bar-chart-base();
background: transparent;
border-left: 1px solid fade(@stacked-bar-chart, 90%);
padding: 0.4rem 0;
width: 1px;
}
@@ -0,0 +1 @@
@stacked-bar-chart: rgb(0, 0, 0);
@ -1,4 +1,4 @@
|
|||
/*! searx | 23-03-2021 | */
|
||||
/*! searx | 21-04-2021 | */
|
||||
/*
|
||||
* searx, A privacy-respecting, hackable metasearch engine
|
||||
*
|
||||
|
@ -692,6 +692,12 @@ html.js .show_if_nojs {
|
|||
.danger {
|
||||
background-color: #fae1e1;
|
||||
}
|
||||
.warning {
|
||||
background: #faf5e1;
|
||||
}
|
||||
.success {
|
||||
background: #e3fae1;
|
||||
}
|
||||
.badge {
|
||||
display: inline-block;
|
||||
color: #fff;
|
||||
|
@ -1147,6 +1153,69 @@ select:focus {
|
|||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
/* -- stacked bar chart -- */
|
||||
.stacked-bar-chart {
|
||||
margin: 0;
|
||||
padding: 0 0.125rem 0 4rem;
|
||||
width: 100%;
|
||||
width: -moz-available;
|
||||
width: -webkit-fill-available;
|
||||
width: fill;
|
||||
flex-direction: row;
|
||||
flex-wrap: nowrap;
|
||||
align-items: center;
|
||||
display: inline-flex;
|
||||
}
|
||||
.stacked-bar-chart-value {
|
||||
width: 3rem;
|
||||
display: inline-block;
|
||||
position: absolute;
|
||||
padding: 0 0.5rem;
|
||||
text-align: right;
|
||||
}
|
||||
.stacked-bar-chart-base {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
}
|
||||
.stacked-bar-chart-median {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: #000000;
|
||||
border: 1px solid rgba(0, 0, 0, 0.9);
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
.stacked-bar-chart-rate80 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border: 1px solid rgba(0, 0, 0, 0.3);
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
.stacked-bar-chart-rate95 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border-bottom: 1px dotted rgba(0, 0, 0, 0.5);
|
||||
padding: 0;
|
||||
}
|
||||
.stacked-bar-chart-rate100 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border-left: 1px solid rgba(0, 0, 0, 0.9);
|
||||
padding: 0.4rem 0;
|
||||
width: 1px;
|
||||
}
|
||||
/*! Autocomplete.js v2.6.3 | license MIT | (c) 2017, Baptiste Donaux | http://autocomplete-js.com */
|
||||
.autocomplete {
|
||||
position: absolute;
|
||||
|
@@ -1435,8 +1504,10 @@ select:focus {
|
|||
font-size: 14px;
|
||||
font-weight: normal;
|
||||
z-index: 1000000;
|
||||
text-align: left;
|
||||
}
|
||||
#main_preferences th:hover .engine-tooltip,
|
||||
#main_preferences td:hover .engine-tooltip,
|
||||
#main_preferences .engine-tooltip:hover {
|
||||
display: inline-block;
|
||||
}
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@@ -1,4 +1,4 @@
|
|||
/*! searx | 23-03-2021 | */
|
||||
/*! searx | 21-04-2021 | */
|
||||
/*
|
||||
* searx, A privacy-respecting, hackable metasearch engine
|
||||
*
|
||||
|
@@ -692,6 +692,12 @@ html.js .show_if_nojs {
|
|||
.danger {
|
||||
background-color: #fae1e1;
|
||||
}
|
||||
.warning {
|
||||
background: #faf5e1;
|
||||
}
|
||||
.success {
|
||||
background: #e3fae1;
|
||||
}
|
||||
.badge {
|
||||
display: inline-block;
|
||||
color: #fff;
|
||||
|
@@ -1147,6 +1153,69 @@ select:focus {
|
|||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
/* -- stacked bar chart -- */
|
||||
.stacked-bar-chart {
|
||||
margin: 0;
|
||||
padding: 0 0.125rem 0 4rem;
|
||||
width: 100%;
|
||||
width: -moz-available;
|
||||
width: -webkit-fill-available;
|
||||
width: fill;
|
||||
flex-direction: row;
|
||||
flex-wrap: nowrap;
|
||||
align-items: center;
|
||||
display: inline-flex;
|
||||
}
|
||||
.stacked-bar-chart-value {
|
||||
width: 3rem;
|
||||
display: inline-block;
|
||||
position: absolute;
|
||||
padding: 0 0.5rem;
|
||||
text-align: right;
|
||||
}
|
||||
.stacked-bar-chart-base {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
}
|
||||
.stacked-bar-chart-median {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: #000000;
|
||||
border: 1px solid rgba(0, 0, 0, 0.9);
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
.stacked-bar-chart-rate80 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border: 1px solid rgba(0, 0, 0, 0.3);
|
||||
padding: 0.3rem 0;
|
||||
}
|
||||
.stacked-bar-chart-rate95 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border-bottom: 1px dotted rgba(0, 0, 0, 0.5);
|
||||
padding: 0;
|
||||
}
|
||||
.stacked-bar-chart-rate100 {
|
||||
display: flex;
|
||||
flex-shrink: 0;
|
||||
flex-grow: 0;
|
||||
flex-basis: unset;
|
||||
background: transparent;
|
||||
border-left: 1px solid rgba(0, 0, 0, 0.9);
|
||||
padding: 0.4rem 0;
|
||||
width: 1px;
|
||||
}
|
||||
/*! Autocomplete.js v2.6.3 | license MIT | (c) 2017, Baptiste Donaux | http://autocomplete-js.com */
|
||||
.autocomplete {
|
||||
position: absolute;
|
||||
|
@@ -1435,8 +1504,10 @@ select:focus {
|
|||
font-size: 14px;
|
||||
font-weight: normal;
|
||||
z-index: 1000000;
|
||||
text-align: left;
|
||||
}
|
||||
#main_preferences th:hover .engine-tooltip,
|
||||
#main_preferences td:hover .engine-tooltip,
|
||||
#main_preferences .engine-tooltip:hover {
|
||||
display: inline-block;
|
||||
}
|
||||
|
|
2 searx/static/themes/simple/css/searx.min.css vendored
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
/*! simple/searx.min.js | 23-03-2021 | */
/*! simple/searx.min.js | 21-04-2021 | */
(function(t,e){"use strict";var a=e.currentScript||function(){var t=e.getElementsByTagName("script");return t[t.length-1]}();t.searx={touch:"ontouchstart"in t||t.DocumentTouch&&document instanceof DocumentTouch||false,method:a.getAttribute("data-method"),autocompleter:a.getAttribute("data-autocompleter")==="true",search_on_category_select:a.getAttribute("data-search-on-category-select")==="true",infinite_scroll:a.getAttribute("data-infinite-scroll")==="true",static_path:a.getAttribute("data-static-path"),translations:JSON.parse(a.getAttribute("data-translations"))};e.getElementsByTagName("html")[0].className=t.searx.touch?"js touch":"js"})(window,document);
|
||||
//# sourceMappingURL=searx.head.min.js.map
|
2 searx/static/themes/simple/js/searx.min.js vendored
@@ -1,4 +1,4 @@
/*! simple/searx.min.js | 23-03-2021 | */
/*! simple/searx.min.js | 21-04-2021 | */
window.searx=function(t,a){"use strict";if(t.Element){(function(e){e.matches=e.matches||e.matchesSelector||e.webkitMatchesSelector||e.msMatchesSelector||function(e){var t=this,n=(t.parentNode||t.document).querySelectorAll(e),i=-1;while(n[++i]&&n[i]!=t);return!!n[i]}})(Element.prototype)}function o(e,t,n){try{e.call(t,n)}catch(e){console.log(e)}}var s=window.searx||{};s.on=function(i,e,r,t){t=t||false;if(typeof i!=="string"){i.addEventListener(e,r,t)}else{a.addEventListener(e,function(e){var t=e.target||e.srcElement,n=false;while(t&&t.matches&&t!==a&&!(n=t.matches(i)))t=t.parentElement;if(n)o(r,t,e)},t)}};s.ready=function(e){if(document.readyState!="loading"){e.call(t)}else{t.addEventListener("DOMContentLoaded",e.bind(t))}};s.http=function(e,t,n){var i=new XMLHttpRequest,r=function(){},a=function(){},o={then:function(e){r=e;return o},catch:function(e){a=e;return o}};try{i.open(e,t,true);i.onload=function(){if(i.status==200){r(i.response,i.responseType)}else{a(Error(i.statusText))}};i.onerror=function(){a(Error("Network Error"))};i.onabort=function(){a(Error("Transaction is aborted"))};i.send()}catch(e){a(e)}return o};s.loadStyle=function(e){var t=s.static_path+e,n="style_"+e.replace(".","_"),i=a.getElementById(n);if(i===null){i=a.createElement("link");i.setAttribute("id",n);i.setAttribute("rel","stylesheet");i.setAttribute("type","text/css");i.setAttribute("href",t);a.body.appendChild(i)}};s.loadScript=function(e,t){var n=s.static_path+e,i="script_"+e.replace(".","_"),r=a.getElementById(i);if(r===null){r=a.createElement("script");r.setAttribute("id",i);r.setAttribute("src",n);r.onload=t;r.onerror=function(){r.setAttribute("error","1")};a.body.appendChild(r)}else if(!r.hasAttribute("error")){try{t.apply(r,[])}catch(e){console.log(e)}}else{console.log("callback not executed : script '"+n+"' not loaded.")}};s.insertBefore=function(e,t){element.parentNode.insertBefore(e,t)};s.insertAfter=function(e,t){t.parentNode.insertBefore(e,t.nextSibling)};s.on(".close","click",function(e){var t=e.target||e.srcElement;this.parentNode.classList.add("invisible")});return s}(window,document);(function(e){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=e()}else if(typeof define==="function"&&define.amd){define([],e)}else{var t;if(typeof window!=="undefined"){t=window}else if(typeof global!=="undefined"){t=global}else if(typeof self!=="undefined"){t=self}else{t=this}t.AutoComplete=e()}})(function(){var e,t,n;return function a(o,s,l){function u(n,e){if(!s[n]){if(!o[n]){var t=typeof require=="function"&&require;if(!e&&t)return t(n,!0);if(c)return c(n,!0);var i=new Error("Cannot find module '"+n+"'");throw i.code="MODULE_NOT_FOUND",i}var r=s[n]={exports:{}};o[n][0].call(r.exports,function(e){var t=o[n][1][e];return u(t?t:e)},r,r.exports,a,o,s,l)}return s[n].exports}var c=typeof require=="function"&&require;for(var e=0;e<l.length;e++)u(l[e]);return u}({1:[function(e,t,n){
|
||||
/*
|
||||
|
|
|
@@ -19,6 +19,9 @@
@color-warning: #dbba34;
@color-warning-background: lighten(@color-warning, 40%);

@color-success: #42db34;
@color-success-background: lighten(@color-success, 40%);

/// General

@color-font: #444;
@@ -105,9 +105,10 @@
  font-size: 14px;
  font-weight: normal;
  z-index: 1000000;
  text-align: left;
}

th:hover .engine-tooltip, .engine-tooltip:hover {
th:hover .engine-tooltip, td:hover .engine-tooltip, .engine-tooltip:hover {
  display: inline-block;
}
@@ -4,6 +4,8 @@
 * To convert "style.less" to "style.css" run: $make styles
 */

@stacked-bar-chart: rgb(0, 0, 0);

@import "normalize.less";

@import "definitions.less";
@@ -36,6 +36,14 @@ html.js .show_if_nojs {
  background-color: @color-error-background;
}

.warning {
  background: @color-warning-background;
}

.success {
  background: @color-success-background;
}

.badge {
  display: inline-block;
  color: #fff;
@@ -465,4 +473,62 @@ select {
  -webkit-transform: rotate(360deg);
  transform: rotate(360deg);
}
}
}

/* -- stacked bar chart -- */
.stacked-bar-chart {
  margin: 0;
  padding: 0 0.125rem 0 4rem;
  width: 100%;
  width: -moz-available;
  width: -webkit-fill-available;
  width: fill;
  flex-direction: row;
  flex-wrap: nowrap;
  align-items: center;
  display: inline-flex;
}

.stacked-bar-chart-value {
  width: 3rem;
  display: inline-block;
  position: absolute;
  padding: 0 0.5rem;
  text-align: right;
}

.stacked-bar-chart-base {
  display:flex;
  flex-shrink: 0;
  flex-grow: 0;
  flex-basis: unset;
}

.stacked-bar-chart-median {
  .stacked-bar-chart-base();
  background: @stacked-bar-chart;
  border: 1px solid fade(@stacked-bar-chart, 90%);
  padding: 0.3rem 0;
}

.stacked-bar-chart-rate80 {
  .stacked-bar-chart-base();
  background: transparent;
  border: 1px solid fade(@stacked-bar-chart, 30%);
  padding: 0.3rem 0;
}

.stacked-bar-chart-rate95 {
  .stacked-bar-chart-base();
  background: transparent;
  border-bottom: 1px dotted fade(@stacked-bar-chart, 50%);
  padding: 0;
}

.stacked-bar-chart-rate100 {
  .stacked-bar-chart-base();
  background: transparent;
  border-left: 1px solid fade(@stacked-bar-chart, 90%);
  padding: 0.4rem 0;
  width: 1px;
}
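The segment widths of this chart are not fixed in the stylesheet: the preferences templates further below set them inline from each engine's median, P80 and P95 response times, normalised by the slowest P95 on the page (max_rate95). A rough sketch of that calculation, assuming the same names as the template context:

```python
# Sketch of how the template sizes the stacked-bar-chart segments
# (mirrors the calc(100% * ...) expressions in preferences.html).
def bar_segments(time, rate80, rate95, max_rate95):
    """Return the width fractions of the median, P80 and P95 segments."""
    median_width = time / max_rate95                # filled bar up to the median
    rate80_width = (rate80 - time) / max_rate95     # outlined bar from median to P80
    rate95_width = (rate95 - rate80) / max_rate95   # dotted underline from P80 to P95
    return round(median_width, 3), round(rate80_width, 3), round(rate95_width, 3)

# e.g. an engine with median 0.4s, P80 0.9s, P95 1.5s on a page where the
# slowest P95 is 3.0s fills roughly 13% / 17% / 20% of the available width.
print(bar_segments(0.4, 0.9, 1.5, 3.0))  # (0.133, 0.167, 0.2)
```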
@@ -134,13 +134,11 @@ custom-select{% if rtl %}-rtl{% endif %}
{%- endmacro %}

{% macro support_toggle(supports) -%}
{%- if supports -%}
<span class="label label-success">
{{- _("supported") -}}
</span>
{%- if supports == '?' -%}
<span class="glyphicon glyphicon-exclamation-sign" aria-hidden="true" title="{{- _('broken') -}}"></span>{{- "" -}}
{%- elif supports -%}
<span class="glyphicon glyphicon-ok" aria-hidden="true" title="{{- _('supported') -}}"></span>{{- "" -}}
{%- else -%}
<span class="label label-danger">
{{- _("not supported") -}}
</span>
<span aria-hidden="true" title="{{- _('not supported') -}}"></span>{{- "" -}}
{%- endif -%}
{%- endmacro %}
@@ -1,16 +1,74 @@
|
|||
{% from 'oscar/macros.html' import preferences_item_header, preferences_item_header_rtl, preferences_item_footer, preferences_item_footer_rtl, checkbox_toggle, support_toggle, custom_select_class %}
|
||||
{% extends "oscar/base.html" %}
|
||||
{% macro engine_about(search_engine, id) -%}
|
||||
{% if search_engine.about is defined %}
|
||||
{%- macro engine_about(search_engine, id) -%}
|
||||
{% if search_engine.about is defined or stats[search_engine.name]['result_count'] > 0 %}
|
||||
{% set about = search_engine.about %}
|
||||
<div class="engine-tooltip" role="tooltip" id="{{ id }}">{{- "" -}}
|
||||
<h5><a href="{{about.website}}" rel="noreferrer">{{about.website}}</a></h5>
|
||||
{%- if about.wikidata_id -%}<p><a href="https://www.wikidata.org/wiki/{{about.wikidata_id}}" rel="noreferrer">wikidata.org/wiki/{{about.wikidata_id}}</a></p>{%- endif -%}
|
||||
{% if search_engine.about is defined %}
|
||||
<h5><a href="{{about.website}}" rel="noreferrer">{{about.website}}</a></h5>
|
||||
{%- if about.wikidata_id -%}<p><a href="https://www.wikidata.org/wiki/{{about.wikidata_id}}" rel="noreferrer">wikidata.org/wiki/{{about.wikidata_id}}</a></p>{%- endif -%}
|
||||
{% endif %}
|
||||
{%- if search_engine.enable_http %}<p>{{ icon('exclamation-sign', 'No HTTPS') }}{{ _('No HTTPS')}}</p>{% endif -%}
|
||||
{%- if stats[search_engine.name]['result_count'] -%}
|
||||
<p>{{ _('Number of results') }}: {{ stats[search_engine.name]['result_count'] }} ( {{ _('Avg.') }} )</p>{{- "" -}}
|
||||
{%- endif -%}
|
||||
</div>
|
||||
{%- endif -%}
|
||||
{%- endmacro %}
|
||||
{% block title %}{{ _('preferences') }} - {% endblock %}
|
||||
|
||||
{%- macro engine_time(engine_name, css_align_class) -%}
|
||||
<td class="{{ label }}" style="padding: 2px">{{- "" -}}
|
||||
{%- if stats[engine_name].time != None -%}
|
||||
<span class="stacked-bar-chart-value">{{- stats[engine_name].time -}}</span>{{- "" -}}
|
||||
<span class="stacked-bar-chart" aria-labelledby="{{engine_name}}_chart" aria-hidden="true">{{- "" -}}
|
||||
<span style="width: calc(max(2px, 100%*{{ (stats[engine_name].time / max_rate95)|round(3) }}))" class="stacked-bar-chart-median"></span>{{- "" -}}
|
||||
<span style="width: calc(100%*{{ ((stats[engine_name].rate80 - stats[engine_name].time) / max_rate95)|round(3) }})" class="stacked-bar-chart-rate80"></span>{{- "" -}}
|
||||
<span style="width: calc(100%*{{ ((stats[engine_name].rate95 - stats[engine_name].rate80) / max_rate95)|round(3) }})" class="stacked-bar-chart-rate95"></span>{{- "" -}}
|
||||
<span class="stacked-bar-chart-rate100"></span>{{- "" -}}
|
||||
</span>{{- "" -}}
|
||||
<div class="engine-tooltip text-left" role="tooltip" id="{{engine_name}}_graph">{{- "" -}}
|
||||
<p>{{ _('Median') }}: {{ stats[engine_name].time }}</p>{{- "" -}}
|
||||
<p>{{ _('P80') }}: {{ stats[engine_name].rate80 }}</p>{{- "" -}}
|
||||
<p>{{ _('P95') }}: {{ stats[engine_name].rate95 }}</p>{{- "" -}}
|
||||
</div>
|
||||
{%- endif -%}
|
||||
</td>
|
||||
{%- endmacro -%}
|
||||
|
||||
{%- macro engine_reliability(engine_name, css_align_class) -%}
|
||||
{% set r = reliabilities.get(engine_name, {}).get('reliablity', None) %}
|
||||
{% set checker_result = reliabilities.get(engine_name, {}).get('checker', []) %}
|
||||
{% set errors = reliabilities.get(engine_name, {}).get('errors', []) %}
|
||||
{% if r != None %}
|
||||
{% if r <= 50 %}{% set label = 'danger' %}
|
||||
{% elif r < 80 %}{% set label = 'warning' %}
|
||||
{% elif r < 90 %}{% set label = 'default' %}
|
||||
{% else %}{% set label = 'success' %}
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% set r = '' %}
|
||||
{% endif %}
|
||||
{% if checker_result or errors %}
|
||||
<td class="{{ css_align_class }} {{ label }}">{{- "" -}}
|
||||
<span aria-labelledby="{{engine_name}}_reliablity">
|
||||
{%- if reliabilities[engine_name].checker %}{{ icon('exclamation-sign', 'The checker fails on the some tests') }}{% endif %} {{ r -}}
|
||||
</span>{{- "" -}}
|
||||
<div class="engine-tooltip text-left" role="tooltip" id="{{engine_name}}_reliablity">
|
||||
{%- if checker_result -%}
|
||||
<p>{{ _("Failed checker test(s): ") }} {{ ', '.join(checker_result) }}</p>
|
||||
{%- endif -%}
|
||||
{%- for error in errors -%}
|
||||
<p>{{ error }} </p>{{- "" -}}
|
||||
{%- endfor -%}
|
||||
</div>{{- "" -}}
|
||||
</td>
|
||||
{%- else -%}
|
||||
<td class="{{ css_align_class }} {{ label }}"><span>{{ r }}</span></td>
|
||||
{%- endif -%}
|
||||
{%- endmacro -%}
|
||||
|
||||
{%- block title %}{{ _('preferences') }} - {% endblock -%}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<div>
|
||||
|
@@ -182,7 +240,6 @@
|
|||
</fieldset>
|
||||
</div>
|
||||
<div class="tab-pane active_if_nojs" id="tab_engine">
|
||||
|
||||
<!-- Nav tabs -->
|
||||
<ul class="nav nav-tabs nav-justified hide_if_nojs" role="tablist">
|
||||
{% for categ in all_categories %}
|
||||
|
@@ -217,14 +274,16 @@
|
|||
<th scope="col">{{ _("Allow") }}</th>
|
||||
<th scope="col">{{ _("Engine name") }}</th>
|
||||
<th scope="col">{{ _("Shortcut") }}</th>
|
||||
<th scope="col">{{ _("Selected language") }}</th>
|
||||
<th scope="col">{{ _("SafeSearch") }}</th>
|
||||
<th scope="col">{{ _("Time range") }}</th>
|
||||
<th scope="col">{{ _("Avg. time") }}</th>
|
||||
<th scope="col">{{ _("Max time") }}</th>
|
||||
<th scope="col" style="width: 10rem">{{ _("Selected language") }}</th>
|
||||
<th scope="col" style="width: 10rem">{{ _("SafeSearch") }}</th>
|
||||
<th scope="col" style="width: 10rem">{{ _("Time range") }}</th>
|
||||
<th scope="col">{{ _("Response time") }}</th>
|
||||
<th scope="col" class="text-right" style="width: 7rem">{{ _("Max time") }}</th>
|
||||
<th scope="col" class="text-right" style="width: 7rem">{{ _("Reliablity") }}</th>
|
||||
{% else %}
|
||||
<th scope="col" class="text-right">{{ _("Max time") }}</th>
|
||||
<th scope="col" class="text-right">{{ _("Avg. time") }}</th>
|
||||
<th scope="col">{{ _("Reliablity") }}</th>
|
||||
<th scope="col">{{ _("Max time") }}</th>
|
||||
<th scope="col" class="text-right">{{ _("Response time") }}</th>
|
||||
<th scope="col" class="text-right">{{ _("Time range") }}</th>
|
||||
<th scope="col" class="text-right">{{ _("SafeSearch") }}</th>
|
||||
<th scope="col" class="text-right">{{ _("Selected language") }}</th>
|
||||
|
@@ -246,17 +305,19 @@
|
|||
{{- engine_about(search_engine, 'tooltip_' + categ + '_' + search_engine.name) -}}
|
||||
</th>
|
||||
<td class="name">{{ shortcuts[search_engine.name] }}</td>
|
||||
<td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
|
||||
<td>{{ support_toggle(search_engine.safesearch==True) }}</td>
|
||||
<td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{% if stats[search_engine.name]['warn_time'] %}{{ icon('exclamation-sign')}} {% endif %}{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{% if stats[search_engine.name]['warn_timeout'] %}{{ icon('exclamation-sign') }} {% endif %}{{ search_engine.timeout }}</td>
|
||||
<td>{{ support_toggle(supports[search_engine.name]['supports_selected_language']) }}</td>
|
||||
<td>{{ support_toggle(supports[search_engine.name]['safesearch']) }}</td>
|
||||
<td>{{ support_toggle(supports[search_engine.name]['time_range_support']) }}</td>
|
||||
{{ engine_time(search_engine.name, 'text-right') }}
|
||||
<td class="text-right {{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{% if stats[search_engine.name]['warn_timeout'] %}{{ icon('exclamation-sign') }} {% endif %}{{ search_engine.timeout }}</td>
|
||||
{{ engine_reliability(search_engine.name, 'text-right ') }}
|
||||
{% else %}
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}{% if stats[search_engine.name]['warn_time'] %} {{ icon('exclamation-sign')}}{% endif %}</td>
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}{% if stats[search_engine.name]['warn_time'] %} {{ icon('exclamation-sign')}}{% endif %}</td>
|
||||
<td>{{ support_toggle(search_engine.time_range_support==True) }}</td>
|
||||
<td>{{ support_toggle(search_engine.safesearch==True) }}</td>
|
||||
<td>{{ support_toggle(stats[search_engine.name].supports_selected_language) }}</td>
|
||||
{{ engine_reliability(search_engine.name, 'text-left') }}
|
||||
<td class="text-left {{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}{% if stats[search_engine.name]['warn_time'] %} {{ icon('exclamation-sign')}}{% endif %}</td>
|
||||
{{ engine_time(search_engine.name, 'text-left') }}
|
||||
<td>{{ support_toggle(supports[search_engine.name]['time_range_support']) }}</td>
|
||||
<td>{{ support_toggle(supports[search_engine.name]['safesearch']) }}</td>
|
||||
<td>{{ support_toggle(supports[search_engine.name]['supports_selected_language']) }}</td>
|
||||
<td>{{ shortcuts[search_engine.name] }}</td>
|
||||
<th scope="row"><span>{% if search_engine.enable_http %}{{ icon('exclamation-sign', 'No HTTPS') }}{% endif %}{{ search_engine.name }}</span>{{ engine_about(search_engine) }}</th>
|
||||
<td class="onoff-checkbox">
|
||||
|
|
|
@@ -1,4 +1,16 @@
{% extends "oscar/base.html" %}
{% block styles %}
<link rel="stylesheet" href="{{ url_for('static', filename='css/charts.min.css') }}" type="text/css" />
<style>
#engine-times {
  --labels-size: 20rem;
}

#engine-times th {
  text-align: right;
}
</style>
{% endblock %}
{% block title %}{{ _('stats') }} - {% endblock %}
{% block content %}
<div class="container-fluid">
@@ -79,7 +79,11 @@

{%- macro checkbox(name, checked, readonly, disabled) -%}
<div class="checkbox">{{- '' -}}
<input type="checkbox" value="None" id="{{ name }}" name="{{ name }}" {% if checked %}checked{% endif %}{% if readonly %} readonly="readonly" {% endif %}{% if disabled %} disabled="disabled" {% endif %}/>{{- '' -}}
<label for="{{ name }}"></label>{{- '' -}}
{%- if checked == '?' -%}
{{ icon_small('warning') }}
{%- else -%}
<input type="checkbox" value="None" id="{{ name }}" name="{{ name }}" {% if checked %}checked{% endif %}{% if readonly %} readonly="readonly" {% endif %}{% if disabled %} disabled="disabled" {% endif %}/>{{- '' -}}
<label for="{{ name }}"></label>{{- '' -}}
{%- endif -%}
</div>
{%- endmacro -%}
@@ -29,6 +29,58 @@
|
|||
{%- endif -%}
|
||||
{%- endmacro %}
|
||||
|
||||
{%- macro engine_time(engine_name) -%}
|
||||
<td class="{{ label }}" style="padding: 2px; width: 13rem;">{{- "" -}}
|
||||
{%- if stats[engine_name].time != None -%}
|
||||
<span class="stacked-bar-chart-value">{{- stats[engine_name].time -}}</span>{{- "" -}}
|
||||
<span class="stacked-bar-chart" aria-labelledby="{{engine_name}}_chart" aria-hidden="true">{{- "" -}}
|
||||
<span style="width: calc(max(2px, 100%*{{ (stats[engine_name].time / max_rate95)|round(3) }}))" class="stacked-bar-chart-median"></span>{{- "" -}}
|
||||
<span style="width: calc(100%*{{ ((stats[engine_name].rate80 - stats[engine_name].time) / max_rate95)|round(3) }})" class="stacked-bar-chart-rate80"></span>{{- "" -}}
|
||||
<span style="width: calc(100%*{{ ((stats[engine_name].rate95 - stats[engine_name].rate80) / max_rate95)|round(3) }})" class="stacked-bar-chart-rate95"></span>{{- "" -}}
|
||||
<span class="stacked-bar-chart-rate100"></span>{{- "" -}}
|
||||
</span>{{- "" -}}
|
||||
<div class="engine-tooltip text-left" role="tooltip" id="{{engine_name}}_graph">{{- "" -}}
|
||||
<p>{{ _('Median') }}: {{ stats[engine_name].time }}</p>{{- "" -}}
|
||||
<p>{{ _('P80') }}: {{ stats[engine_name].rate80 }}</p>{{- "" -}}
|
||||
<p>{{ _('P95') }}: {{ stats[engine_name].rate95 }}</p>{{- "" -}}
|
||||
</div>
|
||||
{%- endif -%}
|
||||
</td>
|
||||
{%- endmacro -%}
|
||||
|
||||
{%- macro engine_reliability(engine_name) -%}
|
||||
{% set r = reliabilities.get(engine_name, {}).get('reliablity', None) %}
|
||||
{% set checker_result = reliabilities.get(engine_name, {}).get('checker', []) %}
|
||||
{% set errors = reliabilities.get(engine_name, {}).get('errors', []) %}
|
||||
{% if r != None %}
|
||||
{% if r <= 50 %}{% set label = 'danger' %}
|
||||
{% elif r < 80 %}{% set label = 'warning' %}
|
||||
{% elif r < 90 %}{% set label = '' %}
|
||||
{% else %}{% set label = 'success' %}
|
||||
{% endif %}
|
||||
{% else %}
|
||||
{% set r = '' %}
|
||||
{% endif %}
|
||||
{% if checker_result or errors %}
|
||||
<td class="{{ label }}">{{- "" -}}
|
||||
<span aria-labelledby="{{engine_name}}_reliablity">
|
||||
{%- if reliabilities[engine_name].checker %}{{ icon('warning', 'The checker fails on the some tests') }}{% endif %} {{ r -}}
|
||||
</span>{{- "" -}}
|
||||
<div class="engine-tooltip" style="right: 12rem;" role="tooltip" id="{{engine_name}}_reliablity">
|
||||
{%- if checker_result -%}
|
||||
<p>{{ _("The checker fails on this tests: ") }} {{ ', '.join(checker_result) }}</p>
|
||||
{%- endif -%}
|
||||
{%- if errors %}<p>{{ _('Errors:') }}</p>{% endif -%}
|
||||
{%- for error in errors -%}
|
||||
<p>{{ error }} </p>{{- "" -}}
|
||||
{%- endfor -%}
|
||||
</div>{{- "" -}}
|
||||
</td>
|
||||
{%- else -%}
|
||||
<td class="{{ css_align_class }} {{ label }}"><span>{{ r }}</span></td>
|
||||
{%- endif -%}
|
||||
{%- endmacro -%}
|
||||
|
||||
{% block head %} {% endblock %}
|
||||
{% block content %}
|
||||
|
||||
|
@@ -123,8 +175,9 @@
|
|||
<th>{{ _("Supports selected language") }}</th>
|
||||
<th>{{ _("SafeSearch") }}</th>
|
||||
<th>{{ _("Time range") }}</th>
|
||||
<th>{{ _("Avg. time") }}</th>
|
||||
<th>{{ _("Response time") }}</th>
|
||||
<th>{{ _("Max time") }}</th>
|
||||
<th>{{ _("Reliablity") }}</th>
|
||||
</tr>
|
||||
{% for search_engine in engines_by_category[categ] %}
|
||||
|
||||
|
@@ -134,11 +187,12 @@
|
|||
<td class="engine_checkbox">{{ checkbox_onoff(engine_id, (search_engine.name, categ) in disabled_engines) }}</td>
|
||||
<th class="name">{% if search_engine.enable_http %}{{ icon('warning', 'No HTTPS') }}{% endif %} {{ search_engine.name }} {{ engine_about(search_engine) }}</th>
|
||||
<td class="shortcut">{{ shortcuts[search_engine.name] }}</td>
|
||||
<td>{{ checkbox(engine_id + '_supported_languages', current_language == 'all' or current_language in search_engine.supported_languages or current_language.split('-')[0] in search_engine.supported_languages, true, true) }}</td>
|
||||
<td>{{ checkbox(engine_id + '_safesearch', search_engine.safesearch==True, true, true) }}</td>
|
||||
<td>{{ checkbox(engine_id + '_time_range_support', search_engine.time_range_support==True, true, true) }}</td>
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_time'] else '' }}">{{ 'N/A' if stats[search_engine.name].time==None else stats[search_engine.name].time }}</td>
|
||||
<td>{{ checkbox(engine_id + '_supported_languages', supports[search_engine.name]['supports_selected_language'], true, true) }}</td>
|
||||
<td>{{ checkbox(engine_id + '_safesearch', supports[search_engine.name]['safesearch'], true, true) }}</td>
|
||||
<td>{{ checkbox(engine_id + '_time_range_support', supports[search_engine.name]['time_range_support'], true, true) }}</td>
|
||||
{{ engine_time(search_engine.name) }}
|
||||
<td class="{{ 'danger' if stats[search_engine.name]['warn_timeout'] else '' }}">{{ search_engine.timeout }}</td>
|
||||
{{ engine_reliability(search_engine.name) }}
|
||||
</tr>
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
|
182 searx/webapp.py
@@ -51,7 +51,7 @@ from searx import logger
logger = logger.getChild('webapp')

from datetime import datetime, timedelta
from time import time
from timeit import default_timer
from html import escape
from io import StringIO
from urllib.parse import urlencode, urlparse
@@ -73,9 +73,7 @@ from flask.json import jsonify
from searx import brand, static_path
from searx import settings, searx_dir, searx_debug
from searx.exceptions import SearxParameterException
from searx.engines import (
    categories, engines, engine_shortcuts, get_engines_stats
)
from searx.engines import categories, engines, engine_shortcuts
from searx.webutils import (
    UnicodeWriter, highlight_content, get_resources_directory,
    get_static_files, get_result_templates, get_themes,
@@ -95,7 +93,7 @@ from searx.preferences import Preferences, ValidationException, LANGUAGE_CODES
from searx.answerers import answerers
from searx.network import stream as http_stream
from searx.answerers import ask
from searx.metrology.error_recorder import errors_per_engines
from searx.metrics import get_engines_stats, get_engine_errors, histogram, counter

# serve pages with HTTP/1.1
from werkzeug.serving import WSGIRequestHandler
@@ -172,6 +170,31 @@ _category_names = (gettext('files'),
                   gettext('onions'),
                   gettext('science'))

#
exception_classname_to_label = {
    "searx.exceptions.SearxEngineCaptchaException": gettext("CAPTCHA"),
    "searx.exceptions.SearxEngineTooManyRequestsException": gettext("too many requests"),
    "searx.exceptions.SearxEngineAccessDeniedException": gettext("access denied"),
    "searx.exceptions.SearxEngineAPIException": gettext("server API error"),
    "httpx.TimeoutException": gettext("HTTP timeout"),
    "httpx.ConnectTimeout": gettext("HTTP timeout"),
    "httpx.ReadTimeout": gettext("HTTP timeout"),
    "httpx.WriteTimeout": gettext("HTTP timeout"),
    "httpx.HTTPStatusError": gettext("HTTP error"),
    "httpx.ConnectError": gettext("HTTP connection error"),
    "httpx.RemoteProtocolError": gettext("HTTP protocol error"),
    "httpx.LocalProtocolError": gettext("HTTP protocol error"),
    "httpx.ProtocolError": gettext("HTTP protocol error"),
    "httpx.ReadError": gettext("network error"),
    "httpx.WriteError": gettext("network error"),
    "httpx.ProxyError": gettext("proxy error"),
    "searx.exceptions.SearxEngineXPathException": gettext("parsing error"),
    "KeyError": gettext("parsing error"),
    "json.decoder.JSONDecodeError": gettext("parsing error"),
    "lxml.etree.ParserError": gettext("parsing error"),
    None: gettext("unexpected crash"),
}

_flask_babel_get_translations = flask_babel.get_translations
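The table above is only a lookup from recorded exception class names to short translatable labels. A minimal sketch of how such a lookup behaves, using plain strings instead of the gettext() calls and a hypothetical label_for() helper:

```python
# Illustrative lookup only: in webapp.py the values are gettext() translations.
exception_classname_to_label = {
    "httpx.ReadTimeout": "HTTP timeout",
    "searx.exceptions.SearxEngineCaptchaException": "CAPTCHA",
    None: "unexpected crash",
}

def label_for(exception_classname):
    # Unknown class names fall back to the None entry, i.e. "unexpected crash".
    return exception_classname_to_label.get(exception_classname,
                                            exception_classname_to_label[None])

print(label_for("httpx.ReadTimeout"))  # HTTP timeout
print(label_for("SomeOtherError"))     # unexpected crash
```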
@@ -463,7 +486,7 @@ def _get_ordered_categories():

@app.before_request
def pre_request():
    request.start_time = time()
    request.start_time = default_timer()
    request.timings = []
    request.errors = []
@@ -521,7 +544,7 @@ def add_default_headers(response):

@app.after_request
def post_request(response):
    total_time = time() - request.start_time
    total_time = default_timer() - request.start_time
    timings_all = ['total;dur=' + str(round(total_time * 1000, 3))]
    if len(request.timings) > 0:
        timings = sorted(request.timings, key=lambda v: v['total'])
@@ -764,6 +787,8 @@ def __get_translated_errors(unresponsive_engines):
        error_msg = gettext(unresponsive_engine[1])
        if unresponsive_engine[2]:
            error_msg = "{} {}".format(error_msg, unresponsive_engine[2])
        if unresponsive_engine[3]:
            error_msg = gettext('Suspended') + ': ' + error_msg
        translated_errors.add((unresponsive_engine[0], error_msg))
    return translated_errors
@@ -850,35 +875,106 @@ def preferences():
    allowed_plugins = request.preferences.plugins.get_enabled()

    # stats for preferences page
    stats = {}
    filtered_engines = dict(filter(lambda kv: (kv[0], request.preferences.validate_token(kv[1])), engines.items()))

    engines_by_category = {}
    for c in categories:
        engines_by_category[c] = []
        for e in categories[c]:
            if not request.preferences.validate_token(e):
                continue

            stats[e.name] = {'time': None,
                             'warn_timeout': False,
                             'warn_time': False}
            if e.timeout > settings['outgoing']['request_timeout']:
                stats[e.name]['warn_timeout'] = True
            stats[e.name]['supports_selected_language'] = _is_selected_language_supported(e, request.preferences)
            engines_by_category[c].append(e)
        engines_by_category[c] = [e for e in categories[c] if e.name in filtered_engines]
        # sort the engines alphabetically since the order in settings.yml is meaningless.
        list.sort(engines_by_category[c], key=lambda e: e.name)

    # get first element [0], the engine time,
    # and then the second element [1] : the time (the first one is the label)
    for engine_stat in get_engines_stats(request.preferences)[0][1]:
        stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
        if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
            stats[engine_stat.get('name')]['warn_time'] = True
    stats = {}
    max_rate95 = 0
    for _, e in filtered_engines.items():
        h = histogram('engine', e.name, 'time', 'total')
        median = round(h.percentage(50), 1) if h.count > 0 else None
        rate80 = round(h.percentage(80), 1) if h.count > 0 else None
        rate95 = round(h.percentage(95), 1) if h.count > 0 else None

        max_rate95 = max(max_rate95, rate95 or 0)

        result_count_sum = histogram('engine', e.name, 'result', 'count').sum
        successful_count = counter('engine', e.name, 'search', 'count', 'successful')
        result_count = int(result_count_sum / float(successful_count)) if successful_count else 0

        stats[e.name] = {
            'time': median if median else None,
            'rate80': rate80 if rate80 else None,
            'rate95': rate95 if rate95 else None,
            'warn_timeout': e.timeout > settings['outgoing']['request_timeout'],
            'supports_selected_language': _is_selected_language_supported(e, request.preferences),
            'result_count': result_count,
        }
    # end of stats

    # reliabilities
    reliabilities = {}
    engine_errors = get_engine_errors(filtered_engines)
    checker_results = checker_get_result()
    checker_results = checker_results['engines'] \
        if checker_results['status'] == 'ok' and 'engines' in checker_results else {}
    for _, e in filtered_engines.items():
        checker_result = checker_results.get(e.name, {})
        checker_success = checker_result.get('success', True)
        errors = engine_errors.get(e.name) or []
        if counter('engine', e.name, 'search', 'count', 'sent') == 0:
            # no request
            reliablity = None
        elif checker_success and not errors:
            reliablity = 100
        elif 'simple' in checker_result.get('errors', {}):
            # the basic (simple) test doesn't work: the engine is broken accoding to the checker
            # even if there is no exception
            reliablity = 0
        else:
            reliablity = 100 - sum([error['percentage'] for error in errors if not error.get('secondary')])

        reliabilities[e.name] = {
            'reliablity': reliablity,
            'errors': [],
            'checker': checker_results.get(e.name, {}).get('errors', {}).keys(),
        }
        # keep the order of the list checker_results[e.name]['errors'] and deduplicate.
        # the first element has the highest percentage rate.
        reliabilities_errors = []
        for error in errors:
            error_user_message = None
            if error.get('secondary') or 'exception_classname' not in error:
                continue
            error_user_message = exception_classname_to_label.get(error.get('exception_classname'))
            if not error:
                error_user_message = exception_classname_to_label[None]
            if error_user_message not in reliabilities_errors:
                reliabilities_errors.append(error_user_message)
        reliabilities[e.name]['errors'] = reliabilities_errors

    # supports
    supports = {}
    for _, e in filtered_engines.items():
        supports_selected_language = _is_selected_language_supported(e, request.preferences)
        safesearch = e.safesearch
        time_range_support = e.time_range_support
        for checker_test_name in checker_results.get(e.name, {}).get('errors', {}):
            if supports_selected_language and checker_test_name.startswith('lang_'):
                supports_selected_language = '?'
            elif safesearch and checker_test_name == 'safesearch':
                safesearch = '?'
            elif time_range_support and checker_test_name == 'time_range':
                time_range_support = '?'
        supports[e.name] = {
            'supports_selected_language': supports_selected_language,
            'safesearch': safesearch,
            'time_range_support': time_range_support,
        }

    #
    locked_preferences = list()
    if 'preferences' in settings and 'lock' in settings['preferences']:
        locked_preferences = settings['preferences']['lock']

    #
    return render('preferences.html',
                  selected_categories=get_selected_categories(request.preferences, request.form),
                  all_categories=_get_ordered_categories(),
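Condensed, the reliability score computed in this hunk follows a small decision ladder; a sketch using the same inputs (sent counter, checker result, recorded errors) as the code above:

```python
def engine_reliability(sent_count, checker_success, simple_test_failed, errors):
    # No search was ever sent to the engine: nothing to score yet.
    if sent_count == 0:
        return None
    # Checker passed and no errors were recorded: perfect score.
    if checker_success and not errors:
        return 100
    # The basic ("simple") checker test fails: the engine counts as broken.
    if simple_test_failed:
        return 0
    # Otherwise subtract the share of failed requests, ignoring secondary errors.
    return 100 - sum(e['percentage'] for e in errors if not e.get('secondary'))
```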
@@ -887,6 +983,9 @@ def preferences():
                  image_proxy=image_proxy,
                  engines_by_category=engines_by_category,
                  stats=stats,
                  max_rate95=max_rate95,
                  reliabilities=reliabilities,
                  supports=supports,
                  answerers=[{'info': a.self_info(), 'keywords': a.keywords} for a in answerers],
                  disabled_engines=disabled_engines,
                  autocomplete_backends=autocomplete_backends,
@@ -974,38 +1073,23 @@ def image_proxy():
@app.route('/stats', methods=['GET'])
def stats():
    """Render engine statistics page."""
    stats = get_engines_stats(request.preferences)
    filtered_engines = dict(filter(lambda kv: (kv[0], request.preferences.validate_token(kv[1])), engines.items()))
    engine_stats = get_engines_stats(filtered_engines)
    return render(
        'stats.html',
        stats=stats,
        stats=[(gettext('Engine time (sec)'), engine_stats['time_total']),
               (gettext('Page loads (sec)'), engine_stats['time_http']),
               (gettext('Number of results'), engine_stats['result_count']),
               (gettext('Scores'), engine_stats['scores']),
               (gettext('Scores per result'), engine_stats['scores_per_result']),
               (gettext('Errors'), engine_stats['error_count'])]
    )


@app.route('/stats/errors', methods=['GET'])
def stats_errors():
    result = {}
    engine_names = list(errors_per_engines.keys())
    engine_names.sort()
    for engine_name in engine_names:
        error_stats = errors_per_engines[engine_name]
        sent_search_count = max(engines[engine_name].stats['sent_search_count'], 1)
        sorted_context_count_list = sorted(error_stats.items(), key=lambda context_count: context_count[1])
        r = []
        percentage_sum = 0
        for context, count in sorted_context_count_list:
            percentage = round(20 * count / sent_search_count) * 5
            percentage_sum += percentage
            r.append({
                'filename': context.filename,
                'function': context.function,
                'line_no': context.line_no,
                'code': context.code,
                'exception_classname': context.exception_classname,
                'log_message': context.log_message,
                'log_parameters': context.log_parameters,
                'percentage': percentage,
            })
        result[engine_name] = sorted(r, reverse=True, key=lambda d: d['percentage'])
    filtered_engines = dict(filter(lambda kv: (kv[0], request.preferences.validate_token(kv[1])), engines.items()))
    result = get_engine_errors(filtered_engines)
    return jsonify(result)