# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""This module implements functions needed for the autocompleter.
"""
# pylint: disable=use-dict-literal
from json import loads
from urllib.parse import urlencode
from lxml import etree
from httpx import HTTPError
from searx import settings
from searx.engines import engines
from searx.network import get as http_get
from searx.exceptions import SearxEngineResponseException
# a fetch_supported_languages() for XPath engines isn't available right now
# _brave = ENGINES_LANGUAGES['brave'].keys()
def get(*args, **kwargs):
    """Thin wrapper over :py:func:`searx.network.get`: fill in the configured
    outgoing request timeout when the caller gave none, and always raise on
    HTTP error status."""
    kwargs.setdefault('timeout', settings['outgoing']['request_timeout'])
    kwargs['raise_for_httperror'] = True
    return http_get(*args, **kwargs)
def brave(query, _lang):
    """Autocomplete from Brave search.

    The suggest API answers ``[query, [suggestion, ...]]``; return the
    suggestion list, or an empty list on HTTP failure or a short payload.
    """
    url = 'https://search.brave.com/api/suggest?'
    url += urlencode({'q': query})
    country = 'all'
    # if lang in _brave:
    #     country = lang
    kwargs = {'cookies': {'country': country}}
    resp = get(url, **kwargs)

    results = []
    if resp.ok:
        data = resp.json()
        # guard against a short payload instead of indexing blindly,
        # matching the len() checks the other backends already do
        if len(data) > 1:
            results = list(data[1])
    return results
def dbpedia(query, _lang):
    """Autocomplete from the DBpedia keyword-search endpoint (XML reply)."""
    base_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
    response = get(base_url + urlencode({'QueryString': query}))
    if not response.ok:
        return []
    dom = etree.fromstring(response.content)
    return dom.xpath('//Result/Label//text()')
def duckduckgo(query, _lang):
    """Autocomplete from DuckDuckGo's ``type=list`` JSON endpoint."""
    url = 'https://ac.duckduckgo.com/ac/?{0}&type=list'
    answer = loads(get(url.format(urlencode(dict(q=query)))).text)
    # payload shape is [query, [suggestion, ...]]; a shorter reply means none
    return answer[1] if len(answer) > 1 else []
def google(query, lang):
    """Autocomplete from Google's toolbar suggest endpoint (XML reply)."""
    suggest_url = 'https://suggestqueries.google.com/complete/search?client=toolbar&'
    response = get(suggest_url + urlencode({'hl': lang, 'q': query}))
    if not response.ok:
        return []
    dom = etree.fromstring(response.text)
    return dom.xpath('//suggestion/@data')
def seznam(query, _lang):
    """Autocomplete from Seznam's fulltext suggest API."""
    params = urlencode(
        {
            'phrase': query,
            'cursorPosition': len(query),
            'format': 'json-2',
            'highlight': '1',
            'count': '6',
        }
    )
    resp = get('https://suggest.seznam.cz/fulltext/cs?{query}'.format(query=params))
    if not resp.ok:
        return []

    data = resp.json()
    suggestions = []
    for item in data.get('result', []):
        # only plain-text items carry usable suggestion fragments
        if item.get('itemType', None) != 'ItemType.TEXT':
            continue
        suggestions.append(''.join(part.get('text', '') for part in item.get('text', [])))
    return suggestions
def startpage(query, lang):
    """Autocomplete from Startpage's suggestions endpoint."""
    # map the language to Startpage's legacy 'lui' name (vintage / deprecated)
    lui = engines['startpage'].supported_languages.get(lang, 'english')
    params = urlencode({'q': query, 'segment': 'startpage.udog', 'lui': lui})
    data = get('https://startpage.com/suggestions?{query}'.format(query=params)).json()
    return [entry['text'] for entry in data.get('suggestions', []) if 'text' in entry]
def swisscows(query, _lang):
    """Autocomplete from Swisscows; the JSON body already is the list."""
    params = urlencode({'query': query})
    response = get('https://swisscows.ch/api/suggest?{query}&itemsCount=5'.format(query=params))
    return loads(response.text)
def qwant(query, sxng_locale):
    """Autocomplete from Qwant. Supports Qwant's regions."""
    locale = engines['qwant'].traits.get_region(sxng_locale, 'en_US')
    params = urlencode({'q': query, 'locale': locale, 'version': '2'})
    resp = get('https://api.qwant.com/v3/suggest?{query}'.format(query=params))
    if not resp.ok:
        return []

    data = resp.json()
    if data['status'] != 'success':
        return []
    return [item['value'] for item in data['data']['items']]
def wikipedia(query, lang):
    """Autocomplete via the opensearch API of the *lang* Wikipedia."""
    endpoint = (
        'https://' + lang + '.wikipedia.org/w/api.php'
        '?action=opensearch&{0}&limit=10&namespace=0&format=json'
    )
    answer = loads(get(endpoint.format(urlencode({'search': query}))).text)
    # opensearch replies [query, [titles, ...], ...]; index 1 holds the titles
    return answer[1] if len(answer) > 1 else []
def yandex(query, _lang):
    """Autocomplete from Yandex's suggest-ff endpoint."""
    params = urlencode(dict(part=query))
    answer = loads(get("https://suggest.yandex.com/suggest-ff.cgi?{0}".format(params)).text)
    # reply shape is [query, [suggestion, ...]]
    return answer[1] if len(answer) > 1 else []
# registry mapping the configured autocompleter name to its implementation;
# key order is kept as-is in case callers iterate over it
backends = {
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google,
    'seznam': seznam,
    'startpage': startpage,
    'swisscows': swisscows,
    'qwant': qwant,
    'wikipedia': wikipedia,
    'brave': brave,
    'yandex': yandex,
}
def search_autocomplete(backend_name, query, sxng_locale):
    """Dispatch *query* to the *backend_name* autocompleter.

    Returns a list of suggestion strings; an unknown backend name or a
    network/engine error yields an empty list.
    """
    backend = backends.get(backend_name)
    if backend is None:
        return []

    if engines[backend_name].traits.data_type != "traits_v1":
        # vintage / deprecated: such engines only take a bare language code
        sxng_locale = 'en' if not sxng_locale or sxng_locale == 'all' else sxng_locale.split('-')[0]

    try:
        return backend(query, sxng_locale)
    except (HTTPError, SearxEngineResponseException):
        return []