forked from Ponysearch/Ponysearch

commit 8205f170ff (parent 707d6270c8)

    [mod] pylint all engines without PYLINT_SEARXNG_DISABLE_OPTION

    Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

155 changed files with 166 additions and 258 deletions
manage | 14 +++++---------
@@ -64,6 +64,11 @@ pylint.FILES() {
     find . -name searxng.msg
 }
 
+PYLINT_FILES=()
+while IFS= read -r line; do
+    PYLINT_FILES+=("$line")
+done <<< "$(pylint.FILES)"
+
 YAMLLINT_FILES=()
 while IFS= read -r line; do
     YAMLLINT_FILES+=("$line")

@@ -77,9 +82,6 @@ PYLINT_SEARXNG_DISABLE_OPTION="\
 I,C,R,\
 W0105,W0212,W0511,W0603,W0613,W0621,W0702,W0703,W1401,\
 E1136"
-PYLINT_ADDITIONAL_BUILTINS_FOR_ENGINES="traits,supported_languages,language_aliases,logger,categories"
-PYLINT_OPTIONS="-m pylint -j 0 --rcfile .pylintrc"
-
 help() {
     nvm.help
     cat <<EOF

@@ -338,12 +340,6 @@ format.python() {
     dump_return $?
 }
 
-
-PYLINT_FILES=()
-while IFS= read -r line; do
-    PYLINT_FILES+=("$line")
-done <<< "$(pylint.FILES)"
-
 # shellcheck disable=SC2119
 main() {
 
@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-1337x
+# pylint: disable=invalid-name
+"""1337x
 
 """
 
 from urllib.parse import quote, urljoin

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=invalid-name
 """9GAG (social media)"""
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Load and initialize the ``engines``, see :py:func:`load_engines` and register
 :py:obj:`engine_shortcuts`.
 

@@ -74,7 +74,7 @@ def response(resp):
     if number_of_results:
         try:
             results.append({'number_of_results': int(extract_text(number_of_results))})
-        except:
+        except:  # pylint: disable=bare-except
             pass
 
     return results

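The recurring fix in the hunk above, and in many hunks that follow, keeps each deliberate bare `except:` and merely adds a pragma so pylint's W0702 stops firing. A minimal, self-contained sketch of the two options — the function name and inputs are invented for illustration, not taken from the commit:

    def count_from(markup):
        try:
            return {'number_of_results': int(markup)}
        except:  # pylint: disable=bare-except -- keeps old behavior, silences W0702
            return {}

    def count_from_strict(markup):
        try:
            return {'number_of_results': int(markup)}
        except (TypeError, ValueError):  # narrower catch needs no pragma
            return {}

The commit opts for the pragma: it is a pure lint change with no risk of altering which exceptions an engine swallows.
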
@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """`Anna's Archive`_ is a free non-profit online shadow library metasearch
 engine providing access to a variety of book resources (also via IPFS), created
 by a team of anonymous archivists (AnnaArchivist_).

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """APKMirror
 """
 

@@ -1,7 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-"""
-
-Apple App Store
+"""Apple App Store
 
 """
 
 from json import loads

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Apple Maps"""
 
 from json import loads

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Arch Linux Wiki
 ~~~~~~~~~~~~~~~

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """The Art Institute of Chicago
 
 Explore thousands of artworks from The Art Institute of Chicago.

@@ -1,11 +1,12 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-ArXiV (Scientific preprints)
+"""ArXiV (Scientific preprints)
+
 """
+from datetime import datetime
 
 from lxml import etree
 from lxml.etree import XPath
-from datetime import datetime
 
 from searx.utils import eval_xpath, eval_xpath_list, eval_xpath_getindex
 
 # about

@@ -50,7 +51,7 @@ def request(query, params):
     # basic search
     offset = (params['pageno'] - 1) * number_of_results
 
-    string_args = dict(query=query, offset=offset, number_of_results=number_of_results)
+    string_args = {'query': query, 'offset': offset, 'number_of_results': number_of_results}
 
     params['url'] = base_url.format(**string_args)

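The `dict(...)` call above becomes a dict literal (pylint's `use-dict-literal`, R1735): the literal skips a global name lookup and a constructor call and builds the same mapping. A small sketch with invented values:

    base_url = 'https://example.org/search?q={query}&start={offset}'  # hypothetical template

    args_call = dict(query='searx', offset=10)        # R1735: avoidable call
    args_literal = {'query': 'searx', 'offset': 10}   # preferred literal
    assert args_call == args_literal

    print(base_url.format(**args_literal))
    # https://example.org/search?q=searx&start=10
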
@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Ask.com"""
 
 from urllib.parse import urlencode

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Bandcamp (Music)
 
 @website https://bandcamp.com/

@@ -1,12 +1,12 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-
-"""
-BASE (Scholar publications)
-"""
+"""BASE (Scholar publications)
+
+"""
+from datetime import datetime
+import re
 
 from urllib.parse import urlencode
 from lxml import etree
-from datetime import datetime
-import re
 from searx.utils import searx_useragent
 
 # about

@@ -55,13 +55,17 @@ shorcut_dict = {
 
 def request(query, params):
     # replace shortcuts with API advanced search keywords
-    for key in shorcut_dict.keys():
-        query = re.sub(key, shorcut_dict[key], query)
+    for key, val in shorcut_dict.items():
+        query = re.sub(key, val, query)
 
     # basic search
     offset = (params['pageno'] - 1) * number_of_results
 
-    string_args = dict(query=urlencode({'query': query}), offset=offset, hits=number_of_results)
+    string_args = {
+        'query': urlencode({'query': query}),
+        'offset': offset,
+        'hits': number_of_results,
+    }
 
     params['url'] = base_url.format(**string_args)

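Iterating `shorcut_dict.keys()` and then indexing `shorcut_dict[key]` does a second hash lookup per key and trips pylint's `consider-using-dict-items` (C0206); `.items()` yields each pair once. A standalone sketch with an invented shortcut table of the same shape:

    import re

    shortcuts = {r'\bti:': 'title:', r'\bau:': 'author:'}  # hypothetical mapping

    query = 'ti:searx au:heiser'
    for pattern, replacement in shortcuts.items():  # one lookup per pair
        query = re.sub(pattern, replacement, query)

    print(query)  # title:searx author:heiser
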
@@ -99,7 +103,7 @@ def response(resp):
         try:
             publishedDate = datetime.strptime(date, date_format)
             break
-        except:
+        except:  # pylint: disable=bare-except
             pass
 
     if publishedDate is not None:

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Bilibili is a Chinese video sharing website.
 
 .. _Bilibili: https://www.bilibili.com

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Bing-WEB engine. Some of this
 implementations are shared by other engines:
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Bing-Images: description see :py:obj:`searx.engines.bing`.
 """
 # pylint: disable=invalid-name

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Bing-News: description see :py:obj:`searx.engines.bing`.
 
 .. hint::

@@ -1,8 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
+# pylint: disable=invalid-name
 """Bing-Videos: description see :py:obj:`searx.engines.bing`.
 """
-# pylint: disable=invalid-name
 
 from typing import TYPE_CHECKING
 import json

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """BPB refers to ``Bundeszentrale für poltische Bildung``, which is a German
 governmental institution aiming to reduce misinformation by providing resources
 about politics and history.

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Brave supports the categories listed in :py:obj:`brave_category` (General,
 news, videos, images). The support of :py:obj:`paging` and :py:obj:`time range
 <time_range_support>` is limited (see remarks).

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """BT4G_ (bt4g.com) is not a tracker and doesn't store any content and only
 collects torrent metadata (such as file names and file sizes) and a magnet link
 (torrent identifier).

@@ -3,8 +3,9 @@
 BTDigg (Videos, Music, Files)
 """
 
-from lxml import html
 from urllib.parse import quote, urljoin
 
+from lxml import html
 from searx.utils import extract_text, get_torrent_size
 
 # about

@@ -67,7 +68,7 @@ def response(resp):
         # convert files to int if possible
         try:
             files = int(files)
-        except:
+        except:  # pylint: disable=bare-except
             files = None
 
         magnetlink = result.xpath('.//div[@class="torrent_magnet"]//a')[0].attrib['href']

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Chefkoch is a German database of recipes.
 """
 

@@ -106,7 +106,7 @@ def init(engine_settings):
     if 'command' not in engine_settings:
         raise ValueError('engine command : missing configuration key: command')
 
-    global command, working_dir, delimiter, parse_regex, environment_variables
+    global command, working_dir, delimiter, parse_regex, environment_variables  # pylint: disable=global-statement
 
     command = engine_settings['command']

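SearXNG engines are plain modules whose settings live in module-level names, so an `init()` hook that assigns them genuinely needs `global`; the pragma acknowledges W0603 instead of restructuring the engine. A toy module sketch (names invented):

    command = None  # module-level engine setting

    def init(engine_settings):
        # engines mutate module state by design; W0603 is acknowledged,
        # not "fixed", because a class-based rewrite is out of scope here
        global command  # pylint: disable=global-statement
        command = engine_settings['command']

    init({'command': ['echo', 'hello']})
    print(command)  # ['echo', 'hello']
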
@@ -172,7 +172,7 @@ def _get_results_from_process(results, cmd, pageno):
             _command_logger.debug('skipped result:', raw_result)
             continue
 
-        if start <= count and count <= end:
+        if start <= count and count <= end:  # pylint: disable=chained-comparison
             result['template'] = result_template
             results.append(result)

@@ -185,6 +185,7 @@ def _get_results_from_process(results, cmd, pageno):
     return_code = process.wait(timeout=timeout)
     if return_code != 0:
         raise RuntimeError('non-zero return code when running command', cmd, return_code)
+    return None
 
 
 def __get_results_limits(pageno):

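The added `return None` satisfies pylint's R1710 (`inconsistent-return-statements`): once any path in a function returns a value, every path should return explicitly rather than fall off the end. A minimal sketch (hypothetical function, not from the commit):

    def exit_status(return_code):
        if return_code != 0:
            return 'failed'  # one branch returns a value ...
        return None          # ... so the other must return explicitly (R1710)

    print(exit_status(0), exit_status(2))  # None failed
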
@@ -230,7 +231,7 @@ def __parse_single_result(raw_result):
     elements = raw_result.split(delimiter['chars'], maxsplit=len(delimiter['keys']) - 1)
     if len(elements) != len(delimiter['keys']):
         return {}
-    for i in range(len(elements)):
+    for i in range(len(elements)):  # pylint: disable=consider-using-enumerate
         result[delimiter['keys'][i]] = elements[i]
 
     if parse_regex:

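Here the index loop stays and C0200 (`consider-using-enumerate`) is silenced, because `i` indexes two parallel lists at once. `zip` would express that pairing directly; a sketch with invented data:

    keys = ['title', 'url']
    elements = ['SearXNG', 'https://example.org']  # hypothetical split result

    result = {}
    for i in range(len(elements)):  # pylint: disable=consider-using-enumerate
        result[keys[i]] = elements[i]

    assert result == dict(zip(keys, elements))  # pragma-free equivalent
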
@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """CORE (science)
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Cppreference
 """
 from lxml import html

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """CrossRef"""
 
 from urllib.parse import urlencode

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Currency convert (DuckDuckGo)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 Dailymotion (Videos)
 ~~~~~~~~~~~~~~~~~~~~

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Deepl translation engine"""
 
 from json import loads

@@ -45,7 +45,7 @@ def response(resp):
     for result in search_res.get('data', []):
         if result['type'] == 'track':
             title = result['title']
-            url = result['link']
+            url = result['link']  # pylint: disable=redefined-outer-name
 
             if url.startswith('http://'):
                 url = 'https' + url[4:]

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Within this module we implement a *demo offline engine*. Do not look to
 close to the implementation, its just a simple example. To get in use of this
 *demo* engine add the following entry to your engines list in ``settings.yml``:

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Within this module we implement a *demo online engine*. Do not look to
 close to the implementation, its just a simple example which queries `The Art
 Institute of Chicago <https://www.artic.edu>`_

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """DeStatis
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Deviantart (Images)
 
 """

@@ -26,7 +26,7 @@ results_xpath = './/table[@id="r"]/tr'
 https_support = True
 
 
-def request(query, params):
+def request(query, params):  # pylint: disable=unused-argument
     params['url'] = url.format(from_lang=params['from_lang'][2], to_lang=params['to_lang'][2], query=params['query'])
 
     return params

@@ -40,7 +40,7 @@ def response(resp):
     for k, result in enumerate(eval_xpath(dom, results_xpath)[1:]):
         try:
             from_result, to_results_raw = eval_xpath(result, './td')
-        except:
+        except:  # pylint: disable=bare-except
             continue
 
         to_results = []

@@ -37,9 +37,9 @@ def response(resp):
     search_res = dom.xpath('.//td[@class="x-item"]')
 
     if not search_res:
-        return list()
+        return []
 
-    results = list()
+    results = []
     for result in search_res:
         url = urljoin(URL, result.xpath('.//a[@title]/@href')[0])
         title = extract_text(result.xpath('.//a[@title]'))

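`list()` to `[]` mirrors the earlier dict-literal change (pylint `use-list-literal`, R1734): the literal avoids a constructor call and cannot be affected by a rebound `list` name. Quick sketch:

    results_call = list()  # R1734: avoidable constructor call
    results_literal = []   # preferred literal form
    assert results_call == results_literal
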
@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Docker Hub (IT)
 
 """

@@ -18,7 +18,7 @@ about = {
 }
 
 # engine dependent config
-categories = ['general']  # TODO , 'images', 'music', 'videos', 'files'
+categories = ['general']  # 'images', 'music', 'videos', 'files'
 paging = False
 number_of_results = 5
 

@@ -31,8 +31,8 @@ search_url = (
     '&{query}'
     # fmt: on
 )
-# TODO '&startRecord={offset}'
-# TODO '&maximumRecords={limit}'
+# '&startRecord={offset}'
+# '&maximumRecords={limit}'
 
 
 # do search-request

|
@ -54,7 +54,7 @@ def response(resp):
|
||||||
for r in eval_xpath(doc, '//div[@class="search_quickresult"]/ul/li'):
|
for r in eval_xpath(doc, '//div[@class="search_quickresult"]/ul/li'):
|
||||||
try:
|
try:
|
||||||
res_url = eval_xpath(r, './/a[@class="wikilink1"]/@href')[-1]
|
res_url = eval_xpath(r, './/a[@class="wikilink1"]/@href')[-1]
|
||||||
except:
|
except: # pylint: disable=bare-except
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not res_url:
|
if not res_url:
|
||||||
|
@ -76,7 +76,7 @@ def response(resp):
|
||||||
|
|
||||||
# append result
|
# append result
|
||||||
results.append({'title': title, 'content': content, 'url': base_url + res_url})
|
results.append({'title': title, 'content': content, 'url': base_url + res_url})
|
||||||
except:
|
except: # pylint: disable=bare-except
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not res_url:
|
if not res_url:
|
||||||
|
|
|
@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 DuckDuckGo Lite
 ~~~~~~~~~~~~~~~

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 DuckDuckGo Instant Answer API
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """
 DuckDuckGo Weather
 ~~~~~~~~~~~~~~~~~~

@@ -1,6 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-Dummy Offline
+# pylint: disable=invalid-name
+"""Dummy Offline
 
 """
 

@@ -14,7 +15,7 @@ about = {
 }
 
 
-def search(query, request_params):
+def search(query, request_params):  # pylint: disable=unused-argument
     return [
         {
             'result': 'this is what you get',

@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-Dummy
+"""Dummy
 """
 
 # about

@@ -15,10 +15,10 @@ about = {
 
 
 # do search-request
-def request(query, params):
+def request(query, params):  # pylint: disable=unused-argument
     return params
 
 
 # get response from search-request
-def response(resp):
+def response(resp):  # pylint: disable=unused-argument
     return []

@@ -3,9 +3,10 @@
 Ebay (Videos, Music, Files)
 """
 
+from urllib.parse import quote
+
 from lxml import html
 from searx.engines.xpath import extract_text
-from urllib.parse import quote
 
 # about
 about = {

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Emojipedia
 
 Emojipedia is an emoji reference website which documents the meaning and

@@ -78,12 +78,10 @@ def response(resp):
         else:
             thumbnail_src = img_src
 
-        url = build_flickr_url(photo['owner'], photo['id'])
-
         # append result
         results.append(
             {
-                'url': url,
+                'url': build_flickr_url(photo['owner'], photo['id']),
                 'title': photo['title'],
                 'img_src': img_src,
                 'thumbnail_src': thumbnail_src,

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Flickr (Images)
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Fyyd (podcasts)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 # pylint: disable=invalid-name
 """Genius
 

@@ -107,7 +107,7 @@ def request(query, params):
 def response(resp):
     # get the base URL for the language in which request was made
     language = locale_to_lang_code(resp.search_params['language'])
-    base_url = get_lang_urls(language)['base']
+    url = get_lang_urls(language)['base']
 
     results = []
 

@@ -116,7 +116,7 @@ def response(resp):
     # parse results
     for result in dom.xpath(xpath_results):
         link = result.xpath(xpath_link)[0]
-        href = urljoin(base_url, link.attrib.get('href'))
+        href = urljoin(url, link.attrib.get('href'))
         title = extract_text(link)
         content = extract_text(result.xpath(xpath_content))
 

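This pair of hunks shows the other cure for W0621 (`redefined-outer-name`): where the Deezer hunk earlier added a pragma, here the local that shadowed a module-level name is simply renamed. A compact sketch of the warning (module and names invented):

    base_url = 'https://wiki.example.org'  # hypothetical module-level setting

    def response_shadowing(lang_base):
        base_url = lang_base  # W0621: shadows the module-level base_url
        return base_url

    def response_renamed(lang_base):
        url = lang_base       # distinct name, no warning
        return url
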
@@ -1,7 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 
 """Github (IT)
 
 """
 
 from urllib.parse import urlencode

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Goodreads (books)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Google WEB engine. Some of this
 implementations (manly the :py:obj:`get_google_info`) are shared by other
 engines:

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Google Images engine using the internal
 Google API used by the Google Go Android app.
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Google News engine.
 
 Google News has a different region handling compared to Google WEB.

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Google Play Apps & Google Play Movies
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Google Scholar engine.
 
 Compared to other Google services the Scholar engine has a simple GET REST-API

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This is the implementation of the Google Videos engine.
 
 .. admonition:: Content-Security-Policy (CSP)

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Hackernews
 """
 

@@ -1,6 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
-
 """IMDB - Internet Movie Database
 
 Retrieves results from a basic search. Advanced search options are not

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Imgur (images)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Internet Archive scholar(science)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Invidious (Videos)
 """
 

@@ -1,4 +1,15 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
+"""The JSON engine is a *generic* engine with which it is possible to configure
+engines in the settings.
+
+.. todo::
+
+   - The JSON engine needs documentation!!
+
+   - The parameters of the JSON engine should be adapted to those of the XPath
+     engine.
+
+"""
 
 from collections.abc import Iterable
 from json import loads

@@ -32,32 +43,31 @@ first_page_num = 1
 
 
 def iterate(iterable):
-    if type(iterable) == dict:
-        it = iterable.items()
+    if isinstance(iterable, dict):
+        items = iterable.items()
     else:
-        it = enumerate(iterable)
-    for index, value in it:
+        items = enumerate(iterable)
+    for index, value in items:
         yield str(index), value
 
 
 def is_iterable(obj):
-    if type(obj) == str:
+    if isinstance(obj, str):
         return False
     return isinstance(obj, Iterable)
 
 
-def parse(query):
-    q = []
+def parse(query):  # pylint: disable=redefined-outer-name
+    q = []  # pylint: disable=invalid-name
     for part in query.split('/'):
         if part == '':
             continue
-        else:
-            q.append(part)
+        q.append(part)
     return q
 
 
-def do_query(data, q):
+def do_query(data, q):  # pylint: disable=invalid-name
     ret = []
     if not q:
         return ret

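`type(x) == dict` trips pylint's C0123 (`unidiomatic-typecheck`) and, unlike `isinstance`, rejects subclasses. A quick check with invented data:

    from collections import OrderedDict

    data = OrderedDict(a=1)
    print(type(data) == dict)      # False: exact-type comparison misses subclasses
    print(isinstance(data, dict))  # True: matches dict and its subclasses
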
@@ -87,10 +97,10 @@ def query(data, query_string):
     return do_query(data, q)
 
 
-def request(query, params):
+def request(query, params):  # pylint: disable=redefined-outer-name
     query = urlencode({'q': query})[2:]
 
-    fp = {'query': query}
+    fp = {'query': query}  # pylint: disable=invalid-name
     if paging and search_url.find('{pageno}') >= 0:
         fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num
 

@@ -115,18 +125,18 @@ def response(resp):
     content_filter = html_to_text if content_html_to_text else identity
 
     if results_query:
-        rs = query(json, results_query)
-        if not len(rs):
+        rs = query(json, results_query)  # pylint: disable=invalid-name
+        if not rs:
             return results
         for result in rs[0]:
             try:
                 url = query(result, url_query)[0]
                 title = query(result, title_query)[0]
-            except:
+            except:  # pylint: disable=bare-except
                 continue
             try:
                 content = query(result, content_query)[0]
-            except:
+            except:  # pylint: disable=bare-except
                 content = ""
             results.append(
                 {

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Kickass Torrent (Videos, Music, Files)"""
 
 import random

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """This engine uses the Lemmy API (https://lemmy.ml/api/v3/search), which is
 documented at `lemmy-js-client`_ / `Interface Search`_. Since Lemmy is
 federated, results are from many different, independent lemmy instances, and not

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """lib.rs (packages)"""
 
 from urllib.parse import quote_plus

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Lingva (alternative Google Translate frontend)"""
 
 from json import loads

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """LiveSpace (Videos)
 
 .. hint::

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Mastodon_ is an open source alternative to large social media platforms like
 Twitter/X, Facebook, ...
 

@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Material Icons (images)
 
 """
+
 import re

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """MediathekViewWeb (API)
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """The MediaWiki engine is a *generic* engine to **query** Wikimedia wikis by
 the `MediaWiki Action API`_. For a `query action`_ all Wikimedia wikis have
 endpoints that follow this pattern::

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """.. sidebar:: info
 
    - :origin:`meilisearch.py <searx/engines/meilisearch.py>`

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """metacpan
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Mixcloud (Music)
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """MongoDB_ is a document based database program that handles JSON like data.
 Before configuring the ``mongodb`` engine, you must install the dependency
 pymongo_.

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Moviepilot is a German movie database, similar to IMDB or TMDB. It doesn't
 have any official API, but it uses JSON requests internally to fetch search
 results and suggestions, that's being used in this implementation.

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Mozhi (alternative frontend for popular translation engines)"""
 
 import random

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Matrix Rooms Search - a fully-featured, standalone, matrix rooms search service.
 
 Configuration

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 
 """This is the implementation of the Mullvad-Leta meta-search engine.
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Mwmbl_ is a non-profit, ad-free, free-libre and free-lunch search engine with
 a focus on useability and speed.
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """MySQL is said to be the most popular open source database. Before enabling
 MySQL engine, you must install the package ``mysql-connector-python``.
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """npms.io
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Nyaa.si (Anime Bittorrent tracker)
 
 """

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Odysee_ is a decentralized video hosting platform.
 
 .. _Odysee: https://github.com/OdyseeTeam/odysee-frontend

@@ -1,11 +1,11 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-"""
-Open Semantic Search
+"""Open Semantic Search
 """
 
-from dateutil import parser
 from json import loads
 from urllib.parse import quote
+
+from dateutil import parser
 
 # about
 about = {

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """OpenStreetMap (Map)
 
 """

@@ -96,7 +96,7 @@ def response(resp):
         title = gettext('{title} (OBSOLETE)').format(title=result['title'])
         try:
             superseded_url = pdbe_entry_url.format(pdb_id=result['superseded_by'])
-        except:
+        except:  # pylint: disable=bare-except
             continue
 
         # since we can't construct a proper body from the response, we'll make up our own

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Peertube and :py:obj:`SepiaSearch <searx.engines.sepiasearch>` do share
 (more or less) the same REST API and the schema of the JSON result is identical.
 

@@ -87,7 +87,7 @@ def response(resp):
                 properties.get('extent')[2],
             ]
         else:
-            # TODO: better boundingbox calculation
+            # better boundingbox calculation?
             boundingbox = [
                 geojson['coordinates'][1],
                 geojson['coordinates'][1],

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Pinterest (images)
 """
 

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """An alternative privacy-friendly YouTube frontend which is efficient by
 design. `Piped’s architecture`_ consists of 3 components:
 

@@ -82,14 +82,14 @@ def response(resp):
         try:
             date = datetime.fromtimestamp(float(result["added"]))
             params['publishedDate'] = date
-        except:
+        except:  # pylint: disable=bare-except
             pass
 
         # let's try to calculate the torrent size
         try:
             filesize = get_torrent_size(result["size"], "B")
             params['filesize'] = filesize
-        except:
+        except:  # pylint: disable=bare-except
             pass
 
         # append result

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Pixiv (images)"""
 
 from urllib.parse import urlencode

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """pkg.go.dev (packages)"""
 
 import re

@@ -1,5 +1,4 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
-# lint: pylint
 """Podcast Index
 """
 

Some files were not shown because too many files have changed in this diff.