# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Qwant (Web, News, Images, Videos)
|
|
|
|
|
|
|
|
This engine uses the Qwant API (https://api.qwant.com/v3). The API is
|
|
|
|
undocumented but can be reverse engineered by reading the network log of
|
|
|
|
https://www.qwant.com/ queries.
|
|
|
|
|
|
|
|
This implementation is used by different qwant engines in the settings.yml::
|
|
|
|
|
|
|
|
- name: qwant
|
|
|
|
categories: general
|
|
|
|
...
|
|
|
|
- name: qwant news
|
|
|
|
categories: news
|
|
|
|
...
|
|
|
|
- name: qwant images
|
|
|
|
categories: images
|
|
|
|
...
|
|
|
|
- name: qwant videos
|
|
|
|
categories: videos
|
|
|
|
...
|
|
|
|
|
2015-06-01 00:00:32 +02:00
|
|
|
"""

from datetime import (
    datetime,
    timedelta,
)
from json import loads
from urllib.parse import urlencode

# from searx import logger
from searx.utils import match_language
from searx.exceptions import SearxEngineAPIException
from searx.network import raise_for_httperror

# logger = logger.getChild('qwant')

# about
about = {
    "website": 'https://www.qwant.com/',
    "wikidata_id": 'Q14657870',
    "official_api_documentation": None,
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

# engine dependent config
categories = []
paging = True
supported_languages_url = about['website']

category_to_keyword = {
    'general': 'web',
    'news': 'news',
    'images': 'images',
    'videos': 'videos',
}

# search-url
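# (reverse engineered; an assembled example with assumed query and locale looks
# like https://api.qwant.com/v3/search/web?q=searx&count=10&offset=0&locale=en_US)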
url = 'https://api.qwant.com/v3/search/{keyword}?{query}&count={count}&offset={offset}'


def request(query, params):
    """Qwant search request"""
    keyword = category_to_keyword[categories[0]]
    count = 10  # web: count must be equal to 10

    if keyword == 'images':
        count = 50
        offset = (params['pageno'] - 1) * count
        # count + offset must be lower than 250
        offset = min(offset, 199)
    else:
        offset = (params['pageno'] - 1) * count
        # count + offset must not exceed 50
        offset = min(offset, 40)
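
    # worked example of the clamping above: images page 5 would ask for offset
    # 200 and is clamped to 199; web page 5 asks for offset 40 and passes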

    params['url'] = url.format(
        keyword=keyword,
        query=urlencode({'q': query}),
        offset=offset,
        count=count,
    )

    # add language tag
    if params['language'] != 'all':
        language = match_language(
            params['language'],
            # pylint: disable=undefined-variable
            supported_languages,
            language_aliases,
        )
        params['url'] += '&locale=' + language.replace('-', '_')

    params['raise_for_httperror'] = False
    return params
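

# Minimal sketch of how the searx core is expected to drive this engine; the
# values below are hypothetical (settings.yml and the search form supply them):
#
#   categories[:] = ['general']
#   params = {'pageno': 2, 'language': 'all'}
#   request('paris', params)
#   # params['url'] is now
#   #   https://api.qwant.com/v3/search/web?q=paris&count=10&offset=10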


def response(resp):
    """Get response from Qwant's search request"""
    keyword = category_to_keyword[categories[0]]
    results = []

    # load JSON result
    search_results = loads(resp.text)
    data = search_results.get('data', {})
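
    # The payload shape is reverse engineered from the parsing below; for the
    # "web" keyword a successful response is roughly:
    #
    #   {"status": "success",
    #    "data": {"result": {"items": {"mainline": [
    #        {"type": "web", "items": [{"title": ..., "url": ..., "desc": ...}]},
    #        ...]}}}}
    #
    # while an error response carries {"status": "error",
    # "data": {"message": [...]}}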

    # check for an API error
    if search_results.get('status') != 'success':
        msg = ",".join(data.get('message', ['unknown']))
        raise SearxEngineAPIException('API error::' + msg)

    # raise for other errors
    raise_for_httperror(resp)

    if keyword == 'web':
        # The WEB query contains a list named 'mainline'.  This list can
        # contain different result types (e.g. mainline[0]['type'] returns the
        # type of the result items in mainline[0]['items']).
        mainline = data.get('result', {}).get('items', {}).get('mainline', {})
    else:
        # Queries on News, Images and Videos do not have a list named
        # 'mainline' in the response.  The result items are directly in the
        # list result['items'].
        mainline = data.get('result', {}).get('items', [])
        mainline = [
            {'type': keyword, 'items': mainline},
        ]

    # return an empty array if there are no results
    if not mainline:
        return []

    for row in mainline:

        mainline_type = row.get('type', 'web')
        if mainline_type == 'ads':
            # ignore ads
            continue

        mainline_items = row.get('items', [])
        for item in mainline_items:

            title = item['title']
            res_url = item['url']

            if mainline_type == 'web':
                content = item['desc']
                results.append({
                    'title': title,
                    'url': res_url,
                    'content': content,
                })

            elif mainline_type == 'news':
                pub_date = datetime.fromtimestamp(item['date'], None)
                news_media = item.get('media', [])
                img_src = None
                if news_media:
                    img_src = news_media[0].get('pict', {}).get('url', None)
                results.append({
                    'title': title,
                    'url': res_url,
                    'publishedDate': pub_date,
                    'img_src': img_src,
                })

            elif mainline_type == 'images':
                thumbnail = item['thumbnail']
                img_src = item['media']
                results.append({
                    'title': title,
                    'url': res_url,
                    'template': 'images.html',
                    'thumbnail_src': thumbnail,
                    'img_src': img_src,
                })

            elif mainline_type == 'videos':
                content = item['desc']
                length = timedelta(seconds=item['duration'])
                pub_date = datetime.fromtimestamp(item['date'])
                thumbnail = item['thumbnail']

                results.append({
                    'title': title,
                    'url': res_url,
                    'content': content,
                    'publishedDate': pub_date,
                    'thumbnail': thumbnail,
                    'template': 'videos.html',
                    'length': length,
                })

    return results


# get supported languages from their site
def _fetch_supported_languages(resp):
    # list of regions is embedded in page as a js object
    response_text = resp.text
    response_text = response_text[response_text.find('INITIAL_PROPS'):]
    response_text = response_text[response_text.find('{'):response_text.find('</script>')]
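
    # response_text should now hold just the JSON object; a sketch inferred
    # from the loop below (the real object carries more keys):
    #
    #   {"locales": {"de": {"langs": ["de"]}, "fr": {"langs": ["fr"]}, ...}}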

    regions_json = loads(response_text)

    supported_languages = []
    for country, langs in regions_json['locales'].items():
        for lang in langs['langs']:
            lang_code = "{lang}-{country}".format(lang=lang, country=country)
            supported_languages.append(lang_code)

    return supported_languages