#!/usr/bin/env python

"""
 Flickr (Images)

 @website     https://www.flickr.com
 @provide-api yes (https://secure.flickr.com/services/api/flickr.photos.search.html)

 @using-api   no
 @results     HTML
 @stable      no
 @parse       url, title, thumbnail, img_src
"""
|
2014-12-16 20:40:03 +01:00
|
|
|
|
|
|
|
from json import loads
|
2016-10-30 21:20:40 +01:00
|
|
|
from time import time
|
2014-12-16 20:40:03 +01:00
|
|
|
import re
|
2015-01-27 19:25:03 +01:00
|
|
|
from searx.engines import logger
|
2019-08-02 13:37:13 +02:00
|
|
|
from searx.url_utils import urlencode
|
|
|
|
from searx.utils import ecma_unescape, html_to_text
|
2015-01-27 19:25:03 +01:00
|
|
|
|
|
|
|
# Engine-specific child logger; inherits configuration from searx.engines.logger.
logger = logger.getChild('flickr-noapi')
# Engine metadata / configuration (read by the searx engine loader).
categories = ['images']

# Base URLs for the HTML (non-API) search page and for individual photo pages.
url = 'https://www.flickr.com/'
search_url = url + 'search?{query}&page={page}'
time_range_url = '&min_upload_date={start}&max_upload_date={end}'
photo_url = 'https://www.flickr.com/photos/{userid}/{photoid}'

# The search page embeds its result data in a JavaScript line of the form
# "modelExport: {...}," — this regex captures that JSON object from the HTML.
modelexport_re = re.compile(r"^\s*modelExport:\s*({.*}),$", re.M)

# Flickr size suffixes, ordered from the biggest to the smallest format.
image_sizes = ('o', 'k', 'h', 'b', 'c', 'z', 'n', 'm', 't', 'q', 's')

paging = True
time_range_support = True
# Length of each supported time range, in seconds.
time_range_dict = {'day': 60 * 60 * 24,
                   'week': 60 * 60 * 24 * 7,
                   'month': 60 * 60 * 24 * 7 * 4,
                   'year': 60 * 60 * 24 * 7 * 52}
def build_flickr_url(user_id, photo_id):
    """Return the flickr.com photo-page URL for *photo_id* owned by *user_id*."""
    return photo_url.format(userid=user_id, photoid=photo_id)
def _get_time_range_url(time_range):
    """Return the upload-date URL fragment for *time_range*, or '' if unsupported.

    *time_range* is one of the keys of ``time_range_dict`` ('day', 'week',
    'month', 'year'); any other value yields an empty string.

    Bug fix: the original put the *current* time into ``min_upload_date``
    and the *past* time into ``max_upload_date``, producing an inverted
    (empty) date window.  The bounds are swapped here so the window covers
    the last *time_range* period, as Flickr's search parameters expect.
    """
    if time_range not in time_range_dict:
        return ''
    now = int(time())
    return time_range_url.format(start=now - time_range_dict[time_range], end=now)
def request(query, params):
    """Fill in ``params['url']`` with the Flickr search URL and return *params*.

    Combines the url-encoded query text, the requested page number and,
    when set, the time-range filter fragment.
    """
    text_query = urlencode({'text': query})
    page_url = search_url.format(query=text_query, page=params['pageno'])
    params['url'] = page_url + _get_time_range_url(params['time_range'])
    return params
def response(resp):
    """Parse the Flickr search-results HTML and return a list of image results.

    The data is scraped from the page's embedded ``modelExport`` JSON blob
    rather than a documented API, so any lookup below may break when Flickr
    changes its markup; every missing piece degrades to an empty result list
    or to skipping the affected photo.
    """
    results = []

    matches = modelexport_re.search(resp.text)

    # page layout changed, or no embedded model at all: nothing to parse
    if matches is None:
        return results

    match = matches.group(1)
    model_export = loads(match)

    if 'legend' not in model_export:
        return results

    legend = model_export['legend']

    # handle empty page
    if not legend or not legend[0]:
        return results

    for index in legend:
        # each legend entry is a path of keys/indices into model_export['main']
        photo = model_export['main'][index[0]][int(index[1])][index[2]][index[3]][int(index[4])]
        author = ecma_unescape(photo.get('realname', ''))
        source = ecma_unescape(photo.get('username', '')) + ' @ Flickr'
        title = ecma_unescape(photo.get('title', ''))
        content = html_to_text(ecma_unescape(photo.get('description', '')))
        img_src = None
        # From the biggest to the lowest format
        for image_size in image_sizes:
            if image_size in photo['sizes']:
                img_src = photo['sizes'][image_size]['url']
                img_format = 'jpg ' \
                    + str(photo['sizes'][image_size]['width']) \
                    + 'x' \
                    + str(photo['sizes'][image_size]['height'])
                break

        if not img_src:
            logger.debug('cannot find valid image size: {0}'.format(repr(photo)))
            continue

        # For a bigger thumbnail, keep only the url_z, not the url_n
        if 'n' in photo['sizes']:
            thumbnail_src = photo['sizes']['n']['url']
        elif 'z' in photo['sizes']:
            thumbnail_src = photo['sizes']['z']['url']
        else:
            thumbnail_src = img_src

        if 'ownerNsid' not in photo:
            # should not happen, disowned photo? Show it anyway
            url = img_src
        else:
            url = build_flickr_url(photo['ownerNsid'], photo['id'])

        result = {
            'url': url,
            'img_src': img_src,
            'thumbnail_src': thumbnail_src,
            'source': source,
            'img_format': img_format,
            'template': 'images.html'
        }
        try:
            # Python-2-era: store UTF-8 byte strings for downstream templates.
            # NOTE(review): under Python 3 ``.encode`` yields ``bytes``, not
            # ``str`` — confirm whether the encode step is still wanted.
            result['author'] = author.encode('utf-8')
            result['title'] = title.encode('utf-8')
            result['content'] = content.encode('utf-8')
        except UnicodeError:
            # best-effort: blank the text fields rather than dropping the
            # result (was a bare ``except:`` that hid unrelated bugs)
            result['author'] = ''
            result['title'] = ''
            result['content'] = ''
        results.append(result)

    return results