From 599c1137bbc2927c7f0d387ccd65daa2a30a6bf4 Mon Sep 17 00:00:00 2001
From: dalf <alex@al-f.net>
Date: Sat, 11 Oct 2014 12:46:12 +0200
Subject: [PATCH 1/3] [fix] the bang was included in the search string
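
Previously self.query was taken verbatim from request_data['q'], so a
bang query such as "!wp searx" was forwarded to the engines with the
"!wp" prefix still attached. Parse the query first and read the search
string back from the Query object; a sketch of the new flow (assuming
getSearchQuery() returns the query with the bang syntax stripped, and
"!wp" as a hypothetical engine shortcut):

    query_obj = Query(request_data['q'], blocked_engines)
    query_obj.parse_query()
    self.query = query_obj.getSearchQuery()   # "!wp searx" -> "searx"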

---
 searx/search.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/searx/search.py b/searx/search.py
index 0aa9d500a..064c68844 100644
--- a/searx/search.py
+++ b/searx/search.py
@@ -311,9 +311,6 @@ class Search(object):
         if not self.request_data.get('q'):
             raise Exception('noquery')
 
-        # set query
-        self.query = self.request_data['q']
-
         # set pagenumber
         pageno_param = self.request_data.get('pageno', '1')
         if not pageno_param.isdigit() or int(pageno_param) < 1:
@@ -322,8 +319,11 @@ class Search(object):
         self.pageno = int(pageno_param)
 
         # parse query, if tags are set, which change the search engine or search language
-        query_obj = Query(self.query, self.blocked_engines)
-        query_obj.parse_query()        
+        query_obj = Query(self.request_data['q'], self.blocked_engines)
+        query_obj.parse_query()
+
+        # set query
+        self.query = query_obj.getSearchQuery()
 
         # get last selected language in query, if possible
         # TODO support search with multiple languages

From 295b1699ced9b79f3b6e5e4375460ca6ddb64431 Mon Sep 17 00:00:00 2001
From: dalf <alex@al-f.net>
Date: Sat, 11 Oct 2014 12:47:30 +0200
Subject: [PATCH 2/3] [mod] return only one result from the wikidata engine
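
With resultCount=2 a single query could fetch two entities and thus
produce two wikidata infoboxes; lowering it to 1 keeps at most one
infobox per query. resultCount is consumed elsewhere in the engine
(not shown in this hunk); presumably it bounds the MediaWiki search
API call, along the lines of:

    # assumed usage, based on the srlimit parameter of list=search
    urlSearch.format(query=urlencode({'srsearch': query,
                                      'srlimit': resultCount}))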

---
 searx/engines/wikidata.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index 8c8e7f219..9ba5fcd0b 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -2,7 +2,7 @@ import json
 from requests import get
 from urllib import urlencode
 
-resultCount=2
+resultCount=1
 urlSearch = 'https://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
 urlDetail = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&props=labels%7Cinfo%7Csitelinks%7Csitelinks%2Furls%7Cdescriptions%7Cclaims&{query}'
 urlMap = 'https://www.openstreetmap.org/?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'

From cac1761a54d4d72f9000e40cc04f05da3d78b7da Mon Sep 17 00:00:00 2001
From: dalf <alex@al-f.net>
Date: Sat, 11 Oct 2014 15:49:50 +0200
Subject: [PATCH 3/3] [enh] infoboxes: if the result doesn't contain anything
 except one link, use the normal result template
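
An infobox that carries nothing except a single link (no image, no
attributes, no related topics, no content) renders as an almost empty
box, so fall back to the normal result template in that case. For
duckduckgo_definitions the fallback looks roughly like this (the
wikidata engine applies the analogous check):

    if image == None and len(attributes) == 0 and len(urls) == 1 \
            and len(relatedTopics) == 0 and len(content) == 0:
        # degenerate infobox: emit a plain url/title/content result
        results.append({'url': urls[0]['url'],
                        'title': heading,
                        'content': content})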

---
 searx/engines/duckduckgo_definitions.py | 27 ++++++-----
 searx/engines/wikidata.py               | 60 +++++++++++++++++--------
 2 files changed, 58 insertions(+), 29 deletions(-)

diff --git a/searx/engines/duckduckgo_definitions.py b/searx/engines/duckduckgo_definitions.py
index 3da7352a4..c008f22f7 100644
--- a/searx/engines/duckduckgo_definitions.py
+++ b/searx/engines/duckduckgo_definitions.py
@@ -116,15 +116,22 @@ def response(resp):
 
     if len(heading)>0:
         # TODO get infobox.meta.value where .label='article_title'
-        results.append({
-               'infobox': heading,
-               'id': infobox_id,
-               'entity': entity,
-               'content': content,
-               'img_src' : image,
-               'attributes': attributes,
-               'urls': urls,
-               'relatedTopics': relatedTopics
-               })
+        if image==None and len(attributes)==0 and len(urls)==1 and len(relatedTopics)==0 and len(content)==0:
+            results.append({
+                    'url': urls[0]['url'],
+                    'title': heading,
+                    'content': content
+                    })
+        else:
+            results.append({
+                    'infobox': heading,
+                    'id': infobox_id,
+                    'entity': entity,
+                    'content': content,
+                    'img_src' : image,
+                    'attributes': attributes,
+                    'urls': urls,
+                    'relatedTopics': relatedTopics
+                    })
 
     return results
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index 9ba5fcd0b..761f8f73c 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -33,17 +33,20 @@ def response(resp):
     return results
 
 def getDetail(jsonresponse, wikidata_id, language):
-    result = jsonresponse.get('entities', {}).get(wikidata_id, {})
-
-    title = result.get('labels', {}).get(language, {}).get('value', None)
-    if title == None:
-        title = result.get('labels', {}).get('en', {}).get('value', wikidata_id)
     results = []
     urls = []
     attributes = []
 
-    description = result.get('descriptions', {}).get(language, {}).get('value', '')
-    if description == '':
+    result = jsonresponse.get('entities', {}).get(wikidata_id, {})
+
+    title = result.get('labels', {}).get(language, {}).get('value', None)
+    if title == None:
+        title = result.get('labels', {}).get('en', {}).get('value', None)
+    if title == None:
+        return results
+
+    description = result.get('descriptions', {}).get(language, {}).get('value', None)
+    if description == None:
         description = result.get('descriptions', {}).get('en', {}).get('value', '')
 
     claims = result.get('claims', {})
@@ -52,11 +55,16 @@ def getDetail(jsonresponse, wikidata_id, language):
         urls.append({ 'title' : 'Official site', 'url': official_website })
         results.append({ 'title': title, 'url' : official_website })
 
+    wikipedia_link_count = 0
     if language != 'en':
-        add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
+        wikipedia_link_count += add_url(urls, 'Wikipedia (' + language + ')', get_wikilink(result, language + 'wiki'))
     wikipedia_en_link = get_wikilink(result, 'enwiki')
-    add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
-
+    wikipedia_link_count += add_url(urls, 'Wikipedia (en)', wikipedia_en_link)
+    if wikipedia_link_count == 0:
+        misc_language = get_wiki_firstlanguage(result, 'wiki')
+        if misc_language != None:
+            add_url(urls, 'Wikipedia (' + misc_language + ')', get_wikilink(result, misc_language + 'wiki'))
+
     if language != 'en':
         add_url(urls, 'Wiki voyage (' + language + ')', get_wikilink(result, language + 'wikivoyage'))
     add_url(urls, 'Wiki voyage (en)', get_wikilink(result, 'enwikivoyage'))
@@ -105,14 +113,20 @@ def getDetail(jsonresponse, wikidata_id, language):
     if date_of_death != None:
         attributes.append({'label' : 'Date of death', 'value' : date_of_death})
 
-
-    results.append({
-            'infobox' : title,
-            'id' : wikipedia_en_link,
-            'content' : description,
-            'attributes' : attributes,
-            'urls' : urls
-            })
+    if len(attributes)==0 and len(urls)==2 and len(description)==0:
+        results.append({
+                'url': urls[0]['url'],
+                'title': title,
+                'content': description
+                })
+    else:
+        results.append({
+                'infobox' : title,
+                'id' : wikipedia_en_link,
+                'content' : description,
+                'attributes' : attributes,
+                'urls' : urls
+                })
 
     return results
 
@@ -120,7 +134,9 @@ def getDetail(jsonresponse, wikidata_id, language):
 def add_url(urls, title, url):
     if url != None:
         urls.append({'title' : title, 'url' : url})
-
+        return 1
+    else:
+        return 0
 
 def get_mainsnak(claims, propertyName):
     propValue = claims.get(propertyName, {})
@@ -213,3 +229,9 @@ def get_wikilink(result, wikiid):
     elif url.startswith('//'):
         url = 'https:' + url
     return url
+
+def get_wiki_firstlanguage(result, wikipatternid):
+    for k in result.get('sitelinks', {}).keys():
+        if k.endswith(wikipatternid) and len(k)==(2+len(wikipatternid)):
+            return k[0:2]
+    return None