# searx/engines/wikipedia.py

## Wikipedia (Web)
#
# @website http://www.wikipedia.org
# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
#
# @using-api yes
# @results JSON
# @stable yes
# @parse url, title
#
# @todo content
from json import loads
# urllib moved in Python 3: urlencode/quote live in urllib.parse there.
try:
    from urllib.parse import urlencode, quote  # Python 3
except ImportError:
    from urllib import urlencode, quote  # Python 2

# engine dependent config
categories = ['general']
language_support = True
paging = True
number_of_results = 1

# search-url
url = 'https://{language}.wikipedia.org/'
search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa
# do search-request
def request(query, params):
    """Build the search request URL for the Wikipedia API.

    query  -- the raw search string
    params -- searx request params dict; must contain 'pageno' and
              'language'. 'url' and 'language' are written back into it.
    """
    # API offset is zero-based; searx page numbers start at 1.
    offset = (params['pageno'] - 1) * number_of_results

    if params['language'] == 'all':
        # no language preference -> default to the English Wikipedia
        language = 'en'
    else:
        # 'en_US' -> 'en': Wikipedia subdomains use the bare language code
        language = params['language'].split('_')[0]

    # write search-language back to params, required in response
    params['language'] = language

    params['url'] = search_url.format(query=urlencode({'srsearch': query}),
                                      offset=offset,
                                      limit=number_of_results,
                                      language=language)

    return params
# get response from search-request
def response(resp):
    """Parse the Wikipedia API JSON response into searx result dicts.

    resp -- the HTTP response object; resp.search_params['language'] was
            set by request() and selects the wiki subdomain for links.
    Returns a list of {'url', 'title', 'content'} dicts (content is
    always empty, see the @todo in the file header).
    """
    results = []

    search_results = loads(resp.text)

    # return empty array if there are no results
    if not search_results.get('query', {}).get('search'):
        return []

    # parse results: link to the article page, not the API
    for result in search_results['query']['search']:
        res_url = url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))

        # append result
        results.append({'url': res_url,
                        'title': result['title'],
                        'content': ''})

    # return results
    return results