online.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""Processors for engine-type: ``online``"""
# pylint: disable=use-dict-literal

from timeit import default_timer
import asyncio
import ssl

import httpx

import searx.network
from searx.utils import gen_useragent
from searx.exceptions import (
    SearxEngineAccessDeniedException,
    SearxEngineCaptchaException,
    SearxEngineTooManyRequestsException,
)
from searx.metrics.error_recorder import count_error

from .abstract import EngineProcessor


def default_request_params():
    """Default request parameters for ``online`` engines."""
    return {
        # fmt: off
        'method': 'GET',
        'headers': {},
        'data': {},
        'url': '',
        'cookies': {},
        'auth': None
        # fmt: on
    }

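
# Example (hypothetical values, for illustration only): an ``online`` engine's
# ``request()`` hook receives this dict via ``OnlineProcessor.get_params()``
# and fills it in, e.g.::
#
#     params = default_request_params()
#     params['url'] = 'https://example.org/search'
#     params['method'] = 'POST'
#     params['data'] = {'q': 'searxng'}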

class OnlineProcessor(EngineProcessor):
    """Processor class for ``online`` engines."""

    engine_type = 'online'

    def initialize(self):
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(self.engine.timeout, start_time=default_timer())
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)
        super().initialize()

    def get_params(self, search_query, engine_category):
        """Returns a set of :ref:`request params <engine request online>` or ``None``
        if the request is not supported."""
        params = super().get_params(search_query, engine_category)
        if params is None:
            return None

        # add default params
        params.update(default_request_params())

        # add a user agent
        params['headers']['User-Agent'] = gen_useragent()

        # add Accept-Language header
        if self.engine.send_accept_language_header and search_query.locale:
            ac_lang = search_query.locale.language
            if search_query.locale.territory:
                ac_lang = "%s-%s,%s;q=0.9,*;q=0.5" % (
                    search_query.locale.language,
                    search_query.locale.territory,
                    search_query.locale.language,
                )
            params['headers']['Accept-Language'] = ac_lang

        self.logger.debug('HTTP Accept-Language: %s', params['headers'].get('Accept-Language', ''))
        return params

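    # Example, derived from the format string above: with the locale ``fr-FR``
    # the Accept-Language header is ``fr-FR,fr;q=0.9,*;q=0.5``; with a
    # territory-less locale such as ``fr`` it is just ``fr``.
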
    def _send_http_request(self, params):
        # create a dictionary that contains all
        # information about the request
        request_args = dict(headers=params['headers'], cookies=params['cookies'], auth=params['auth'])

        # verify
        # if not None, it overrides the verify value defined in the network.
        # use False to accept any server certificate
        # use a path to a file to specify a server certificate
        verify = params.get('verify')
        if verify is not None:
            request_args['verify'] = params['verify']

        # max_redirects
        max_redirects = params.get('max_redirects')
        if max_redirects:
            request_args['max_redirects'] = max_redirects

        # allow_redirects
        if 'allow_redirects' in params:
            request_args['allow_redirects'] = params['allow_redirects']

        # soft_max_redirects
        soft_max_redirects = params.get('soft_max_redirects', max_redirects or 0)

        # raise_for_httperror
        request_args['raise_for_httperror'] = params.get('raise_for_httperror', True)

        # specific type of request (GET or POST)
        if params['method'] == 'GET':
            req = searx.network.get
        else:
            req = searx.network.post
            request_args['data'] = params['data']

        # send the request
        response = req(params['url'], **request_args)

        # check the soft limit of the redirect count
        if len(response.history) > soft_max_redirects:
            # unexpected redirect: record an error,
            # but the engine might still return valid results.
            status_code = str(response.status_code or '')
            reason = response.reason_phrase or ''
            hostname = response.url.host
            count_error(
                self.engine_name,
                '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                (status_code, reason, hostname),
                secondary=True,
            )

        return response

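    # The keys consumed above can be set per request in an engine's
    # ``request()`` hook, e.g. (hypothetical values):
    #
    #     params['verify'] = '/path/to/ca-bundle.pem'   # pin a CA bundle
    #     params['max_redirects'] = 5       # hard limit, enforced by httpx
    #     params['soft_max_redirects'] = 2  # exceeding it only records an error metric
    #     params['raise_for_httperror'] = False
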
    def _search_basic(self, query, params):
        # update the request parameters, specific to the
        # search engine (implemented in the engines folder)
        self.engine.request(query, params)

        # ignore empty URLs
        if not params['url']:
            return None

        # send the request
        response = self._send_http_request(params)

        # parse the response
        response.search_params = params
        return self.engine.response(response)

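    # A minimal sketch of the engine contract driven above; the module name,
    # URL and result fields are hypothetical:
    #
    #     # searx/engines/example.py
    #     def request(query, params):
    #         params['url'] = 'https://example.org/search?q=' + query
    #         return params
    #
    #     def response(resp):
    #         # parse resp.text and return a list of result dicts
    #         return [{'url': '...', 'title': '...', 'content': '...'}]
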
    def search(self, query, params, result_container, start_time, timeout_limit):
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)

        try:
            # send requests and parse the results
            search_results = self._search_basic(query, params)
            self.extend_container(result_container, start_time, search_results)
        except ssl.SSLError as e:
            # TLS error (e.g. certificate verification failure)
            self.handle_exception(result_container, e, suspend=True)
            self.logger.error("SSLError {}, verify={}".format(e, searx.network.get_network(self.engine_name).verify))
        except (httpx.TimeoutException, asyncio.TimeoutError) as e:
            # requests timeout (connect or read)
            self.handle_exception(result_container, e, suspend=True)
            self.logger.error(
                "HTTP requests timeout (search duration: {0} s, timeout: {1} s): {2}".format(
                    default_timer() - start_time, timeout_limit, e.__class__.__name__
                )
            )
        except (httpx.HTTPError, httpx.StreamError) as e:
            # other requests exception
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception(
                "requests exception (search duration: {0} s, timeout: {1} s): {2}".format(
                    default_timer() - start_time, timeout_limit, e
                )
            )
        except SearxEngineCaptchaException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('CAPTCHA')
        except SearxEngineTooManyRequestsException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('Too many requests')
        except SearxEngineAccessDeniedException as e:
            self.handle_exception(result_container, e, suspend=True)
            self.logger.exception('SearXNG is blocked')
        except Exception as e:  # pylint: disable=broad-except
            self.handle_exception(result_container, e)
            self.logger.exception('exception: {0}'.format(e))

    def get_default_tests(self):
        tests = {}

        tests['simple'] = {
            'matrix': {'query': ('life', 'computer')},
            'result_container': ['not_empty'],
        }

        if getattr(self.engine, 'paging', False):
            tests['paging'] = {
                'matrix': {'query': 'time', 'pageno': (1, 2, 3)},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }
            if 'general' in self.engine.categories:
                # avoid documentation about HTML tags (<time> and <input type="time">)
                tests['paging']['matrix']['query'] = 'news'

        if getattr(self.engine, 'time_range', False):
            tests['time_range'] = {
                'matrix': {'query': 'news', 'time_range': (None, 'day')},
                'result_container': ['not_empty'],
                'test': ['unique_results'],
            }

        if getattr(self.engine, 'traits', False):
            tests['lang_fr'] = {
                'matrix': {'query': 'paris', 'lang': 'fr'},
                'result_container': ['not_empty', ('has_language', 'fr')],
            }
            tests['lang_en'] = {
                'matrix': {'query': 'paris', 'lang': 'en'},
                'result_container': ['not_empty', ('has_language', 'en')],
            }

        if getattr(self.engine, 'safesearch', False):
            tests['safesearch'] = {'matrix': {'query': 'porn', 'safesearch': (0, 2)}, 'test': ['unique_results']}

        return tests
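
# Example, derived from the conditions above: for an engine with
# ``paging = True`` in the ``general`` category, the returned dict is::
#
#     {
#         'simple': {'matrix': {'query': ('life', 'computer')},
#                    'result_container': ['not_empty']},
#         'paging': {'matrix': {'query': 'news', 'pageno': (1, 2, 3)},
#                    'result_container': ['not_empty'],
#                    'test': ['unique_results']},
#     }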