forked from Ponysearch/Ponysearch
Merge pull request #93 from dalf/master
yahoo, bing_news and dailymotion fixes
commit 1e99cf2a0e
3 changed files with 29 additions and 9 deletions
@@ -56,10 +56,14 @@ def response(resp):
         link = result.xpath('.//div[@class="newstitle"]/a')[0]
         url = link.attrib.get('href')
         title = ' '.join(link.xpath('.//text()'))
-        content = escape(' '.join(result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]//text()')))
+        contentXPath = result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]//text()')
+        if contentXPath != None:
+            content = escape(' '.join(contentXPath))

         # parse publishedDate
-        publishedDate = escape(' '.join(result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_ST"]//span[@class="sn_tm"]//text()')))
+        publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div//span[contains(@class,"sn_ST")]//span[contains(@class,"sn_tm")]//text()')
+        if publishedDateXPath != None:
+            publishedDate = escape(' '.join(publishedDateXPath))

         if re.match("^[0-9]+ minute(s|) ago$", publishedDate):
             timeNumbers = re.findall(r'\d+', publishedDate)

@@ -74,8 +78,17 @@ def response(resp):
             publishedDate = datetime.now()\
                 - timedelta(hours=int(timeNumbers[0]))\
                 - timedelta(minutes=int(timeNumbers[1]))
+        elif re.match("^[0-9]+ day(s|) ago$", publishedDate):
+            timeNumbers = re.findall(r'\d+', publishedDate)
+            publishedDate = datetime.now()\
+                - timedelta(days=int(timeNumbers[0]))
         else:
-            publishedDate = parser.parse(publishedDate)
+            try:
+                # FIXME use params['language'] to parse either mm/dd or dd/mm
+                publishedDate = parser.parse(publishedDate, dayfirst=False)
+            except TypeError:
+                # FIXME
+                publishedDate = datetime.now()

         # append result
         results.append({'url': url,
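
The hunks above (the bing_news engine, going by the commit title) make two kinds of fixes: the content and publishedDate XPath results are stored in variables and checked before being joined, and the relative-date handling gains a "N day(s) ago" branch plus a try/except fallback around parser.parse. A minimal standalone sketch of that date logic; the hour/minute regex falls outside the hunk and is an assumption, and parse_relative_date and the example strings are made up here for illustration:

import re
from datetime import datetime, timedelta
from dateutil import parser


# hypothetical helper mirroring the branching in the patched response();
# the hour/minute regex is an assumption (those lines are not in the hunk)
def parse_relative_date(text):
    if re.match("^[0-9]+ minute(s|) ago$", text):
        minutes = int(re.findall(r'\d+', text)[0])
        return datetime.now() - timedelta(minutes=minutes)
    elif re.match("^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$", text):
        numbers = re.findall(r'\d+', text)
        return datetime.now()\
            - timedelta(hours=int(numbers[0]))\
            - timedelta(minutes=int(numbers[1]))
    elif re.match("^[0-9]+ day(s|) ago$", text):
        days = int(re.findall(r'\d+', text)[0])
        return datetime.now() - timedelta(days=days)
    else:
        try:
            # dayfirst=False assumes mm/dd ordering, as the FIXME in the diff notes
            return parser.parse(text, dayfirst=False)
        except TypeError:
            return datetime.now()


# example inputs, made up for illustration
print(parse_relative_date('3 minutes ago'))
print(parse_relative_date('2 days ago'))
print(parse_relative_date('02/27/2014'))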
@@ -16,8 +16,8 @@ from lxml import html

 # engine dependent config
 categories = ['videos']
-locale = 'en_US'
 paging = True
+language_support = True

 # search-url
 # see http://www.dailymotion.com/doc/api/obj-video.html

@@ -26,6 +26,11 @@ search_url = 'https://api.dailymotion.com/videos?fields=title,description,durati

 # do search-request
 def request(query, params):
+    if params['language'] == 'all':
+        locale = 'en-US'
+    else:
+        locale = params['language']
+
     params['url'] = search_url.format(
         query=urlencode({'search': query, 'localization': locale}),
         pageno=params['pageno'])
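
In the dailymotion engine the fixed module-level locale is dropped in favour of a per-request localization taken from params['language'], with 'en-US' as the fallback when the language is 'all', and language_support is switched on. A rough self-contained sketch of the patched request(); the search_url template is truncated in the hunk header above, so the field list and page parameter used here are assumptions:

try:
    from urllib import urlencode          # Python 2, as searx used at the time
except ImportError:
    from urllib.parse import urlencode    # Python 3

# the real template is cut off in the hunk header; this one is an assumption
search_url = ('https://api.dailymotion.com/videos'
              '?fields=title,description,duration,url'
              '&page={pageno}&{query}')


def request(query, params):
    # choose the localization per request instead of a fixed module-level locale
    if params['language'] == 'all':
        locale = 'en-US'
    else:
        locale = params['language']

    params['url'] = search_url.format(
        query=urlencode({'search': query, 'localization': locale}),
        pageno=params['pageno'])

    return params


# example call, values made up for illustration
print(request('pony', {'language': 'de_DE', 'pageno': 1})['url'])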
@@ -40,9 +40,11 @@ def parse_url(url_string):
         if endpos > -1:
             endpositions.append(endpos)

-    end = min(endpositions)
-    return unquote(url_string[start:end])
+    if start==0 or len(endpositions) == 0:
+        return url_string
+    else:
+        end = min(endpositions)
+        return unquote(url_string[start:end])


 # do search-request
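
The yahoo engine's parse_url() now bails out early when it cannot locate the embedded target URL (start == 0) or any known end marker (empty endpositions), returning the raw string instead of failing on min() of an empty list. A sketch of the whole function, under the assumption that the lines outside the hunk compute start and endpositions from '/RU=', '/RS' and '/RK' markers; the example URLs are made up for illustration:

try:
    from urllib import unquote          # Python 2, as searx used at the time
except ImportError:
    from urllib.parse import unquote    # Python 3


# the lines outside the hunk are assumptions about how parse_url finds its markers
def parse_url(url_string):
    endings = ['/RS', '/RK']
    endpositions = []
    start = url_string.find('http', url_string.find('/RU=') + 1)

    for ending in endings:
        endpos = url_string.rfind(ending)
        if endpos > -1:
            endpositions.append(endpos)

    # new guard: if the wrapped URL or its end markers are missing,
    # hand back the raw string instead of slicing with bad positions
    if start == 0 or len(endpositions) == 0:
        return url_string
    else:
        end = min(endpositions)
        return unquote(url_string[start:end])


# example wrapped URL, made up for illustration
print(parse_url('http://r.search.yahoo.com/_ylt=A0;_ylu=X3o/RU=http%3a%2f%2fexample.org%2f/RK=0/RS=abc'))
print(parse_url('http://example.org/plain'))  # no markers: returned unchanged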