  1. # SPDX-License-Identifier: AGPL-3.0-or-later
  2. """This module implements functions needed for the autocompleter.
  3. """
  4. # pylint: disable=use-dict-literal
  5. import json
  6. import html
  7. from urllib.parse import urlencode, quote_plus
  8. import lxml
  9. from httpx import HTTPError
  10. from searx import settings
  11. from searx.engines import (
  12. engines,
  13. google,
  14. )
  15. from searx.network import get as http_get, post as http_post
  16. from searx.exceptions import SearxEngineResponseException
  17. def update_kwargs(**kwargs):
  18. if 'timeout' not in kwargs:
  19. kwargs['timeout'] = settings['outgoing']['request_timeout']
  20. kwargs['raise_for_httperror'] = True
  21. def get(*args, **kwargs):
  22. update_kwargs(**kwargs)
  23. return http_get(*args, **kwargs)
  24. def post(*args, **kwargs):
  25. update_kwargs(**kwargs)
  26. return http_post(*args, **kwargs)
  27. def baidu(query, _lang):
  28. # baidu search autocompleter
  29. base_url = "https://www.baidu.com/sugrec?"
  30. response = get(base_url + urlencode({'ie': 'utf-8', 'json': 1, 'prod': 'pc', 'wd': query}))
  31. results = []
  32. if response.ok:
  33. data = response.json()
  34. if 'g' in data:
  35. for item in data['g']:
  36. results.append(item['q'])
  37. return results
  38. def brave(query, _lang):
  39. # brave search autocompleter
  40. url = 'https://search.brave.com/api/suggest?'
  41. url += urlencode({'q': query})
  42. country = 'all'
  43. # if lang in _brave:
  44. # country = lang
  45. kwargs = {'cookies': {'country': country}}
  46. resp = get(url, **kwargs)
  47. results = []
  48. if resp.ok:
  49. data = resp.json()
  50. for item in data[1]:
  51. results.append(item)
  52. return results
  53. def dbpedia(query, _lang):
  54. # dbpedia autocompleter, no HTTPS
  55. autocomplete_url = 'https://lookup.dbpedia.org/api/search.asmx/KeywordSearch?'
  56. response = get(autocomplete_url + urlencode(dict(QueryString=query)))
  57. results = []
  58. if response.ok:
  59. dom = lxml.etree.fromstring(response.content)
  60. results = dom.xpath('//Result/Label//text()')
  61. return results
  62. def duckduckgo(query, sxng_locale):
  63. """Autocomplete from DuckDuckGo. Supports DuckDuckGo's languages"""
  64. traits = engines['duckduckgo'].traits
  65. args = {
  66. 'q': query,
  67. 'kl': traits.get_region(sxng_locale, traits.all_locale),
  68. }
  69. url = 'https://duckduckgo.com/ac/?type=list&' + urlencode(args)
  70. resp = get(url)
  71. ret_val = []
  72. if resp.ok:
  73. j = resp.json()
  74. if len(j) > 1:
  75. ret_val = j[1]
  76. return ret_val
  77. def google_complete(query, sxng_locale):
  78. """Autocomplete from Google. Supports Google's languages and subdomains
  79. (:py:obj:`searx.engines.google.get_google_info`) by using the async REST
  80. API::
  81. https://{subdomain}/complete/search?{args}
  82. """
  83. google_info = google.get_google_info({'searxng_locale': sxng_locale}, engines['google'].traits)
  84. url = 'https://{subdomain}/complete/search?{args}'
  85. args = urlencode(
  86. {
  87. 'q': query,
  88. 'client': 'gws-wiz',
  89. 'hl': google_info['params']['hl'],
  90. }
  91. )
  92. results = []
  93. resp = get(url.format(subdomain=google_info['subdomain'], args=args))
  94. if resp.ok:
  95. json_txt = resp.text[resp.text.find('[') : resp.text.find(']', -3) + 1]
  96. data = json.loads(json_txt)
  97. for item in data[0]:
  98. results.append(lxml.html.fromstring(item[0]).text_content())
  99. return results
  100. def mwmbl(query, _lang):
  101. """Autocomplete from Mwmbl_."""
  102. # mwmbl autocompleter
  103. url = 'https://api.mwmbl.org/search/complete?{query}'
  104. results = get(url.format(query=urlencode({'q': query}))).json()[1]
  105. # results starting with `go:` are direct urls and not useful for auto completion
  106. return [result for result in results if not result.startswith("go: ") and not result.startswith("search: ")]
  107. def seznam(query, _lang):
  108. # seznam search autocompleter
  109. url = 'https://suggest.seznam.cz/fulltext/cs?{query}'
  110. resp = get(
  111. url.format(
  112. query=urlencode(
  113. {'phrase': query, 'cursorPosition': len(query), 'format': 'json-2', 'highlight': '1', 'count': '6'}
  114. )
  115. )
  116. )
  117. if not resp.ok:
  118. return []
  119. data = resp.json()
  120. return [
  121. ''.join([part.get('text', '') for part in item.get('text', [])])
  122. for item in data.get('result', [])
  123. if item.get('itemType', None) == 'ItemType.TEXT'
  124. ]
  125. def stract(query, _lang):
  126. # stract autocompleter (beta)
  127. url = f"https://stract.com/beta/api/autosuggest?q={quote_plus(query)}"
  128. resp = post(url)
  129. if not resp.ok:
  130. return []
  131. return [html.unescape(suggestion['raw']) for suggestion in resp.json()]
  132. def startpage(query, sxng_locale):
  133. """Autocomplete from Startpage. Supports Startpage's languages"""
  134. lui = engines['startpage'].traits.get_language(sxng_locale, 'english')
  135. url = 'https://startpage.com/suggestions?{query}'
  136. resp = get(url.format(query=urlencode({'q': query, 'segment': 'startpage.udog', 'lui': lui})))
  137. data = resp.json()
  138. return [e['text'] for e in data.get('suggestions', []) if 'text' in e]
  139. def swisscows(query, _lang):
  140. # swisscows autocompleter
  141. url = 'https://swisscows.ch/api/suggest?{query}&itemsCount=5'
  142. resp = json.loads(get(url.format(query=urlencode({'query': query}))).text)
  143. return resp
  144. def qwant(query, sxng_locale):
  145. """Autocomplete from Qwant. Supports Qwant's regions."""
  146. results = []
  147. locale = engines['qwant'].traits.get_region(sxng_locale, 'en_US')
  148. url = 'https://api.qwant.com/v3/suggest?{query}'
  149. resp = get(url.format(query=urlencode({'q': query, 'locale': locale, 'version': '2'})))
  150. if resp.ok:
  151. data = resp.json()
  152. if data['status'] == 'success':
  153. for item in data['data']['items']:
  154. results.append(item['value'])
  155. return results
  156. def wikipedia(query, sxng_locale):
  157. """Autocomplete from Wikipedia. Supports Wikipedia's languages (aka netloc)."""
  158. results = []
  159. eng_traits = engines['wikipedia'].traits
  160. wiki_lang = eng_traits.get_language(sxng_locale, 'en')
  161. wiki_netloc = eng_traits.custom['wiki_netloc'].get(wiki_lang, 'en.wikipedia.org')
  162. url = 'https://{wiki_netloc}/w/api.php?{args}'
  163. args = urlencode(
  164. {
  165. 'action': 'opensearch',
  166. 'format': 'json',
  167. 'formatversion': '2',
  168. 'search': query,
  169. 'namespace': '0',
  170. 'limit': '10',
  171. }
  172. )
  173. resp = get(url.format(args=args, wiki_netloc=wiki_netloc))
  174. if resp.ok:
  175. data = resp.json()
  176. if len(data) > 1:
  177. results = data[1]
  178. return results
  179. def yandex(query, _lang):
  180. # yandex autocompleter
  181. url = "https://suggest.yandex.com/suggest-ff.cgi?{0}"
  182. resp = json.loads(get(url.format(urlencode(dict(part=query)))).text)
  183. if len(resp) > 1:
  184. return resp[1]
  185. return []
# Maps the configured `autocomplete` backend name to its completer function;
# looked up by search_autocomplete().
backends = {
    'baidu': baidu,
    'brave': brave,
    'dbpedia': dbpedia,
    'duckduckgo': duckduckgo,
    'google': google_complete,
    'mwmbl': mwmbl,
    'qwant': qwant,
    'seznam': seznam,
    'startpage': startpage,
    'stract': stract,
    'swisscows': swisscows,
    'wikipedia': wikipedia,
    'yandex': yandex,
}
  201. def search_autocomplete(backend_name, query, sxng_locale):
  202. backend = backends.get(backend_name)
  203. if backend is None:
  204. return []
  205. try:
  206. return backend(query, sxng_locale)
  207. except (HTTPError, SearxEngineResponseException):
  208. return []