etools.py

# SPDX-License-Identifier: AGPL-3.0-or-later
"""
eTools (Web)
"""

from lxml import html
from urllib.parse import quote
from searx.utils import extract_text, eval_xpath

# about
about = {
    "website": 'https://www.etools.ch',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

categories = ['general']
paging = False
safesearch = True

base_url = 'https://www.etools.ch'
search_path = '/searchAdvancedSubmit.do'\
    '?query={search_term}'\
    '&pageResults=20'\
    '&safeSearch={safesearch}'


def request(query, params):
    if params['safesearch']:
        safesearch = 'true'
    else:
        safesearch = 'false'

    params['url'] = base_url + search_path.format(search_term=quote(query), safesearch=safesearch)

    return params


def response(resp):
    results = []

    dom = html.fromstring(resp.text)

    for result in eval_xpath(dom, '//table[@class="result"]//td[@class="record"]'):
        url = eval_xpath(result, './a/@href')[0]
        title = extract_text(eval_xpath(result, './a//text()'))
        content = extract_text(eval_xpath(result, './/div[@class="text"]//text()'))

        results.append({'url': url,
                        'title': title,
                        'content': content})

    return results
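
Below is a minimal sketch of how the two hooks above could be exercised outside the SearXNG framework, assuming only that request() receives a params dict with a 'safesearch' key and that response() receives an object exposing the raw HTML as .text. The FakeResponse class and the sample markup are illustrative stand-ins for what the framework normally supplies; they are not part of the engine.

class FakeResponse:
    """Illustrative stand-in for the HTTP response object the framework passes in."""

    def __init__(self, text):
        self.text = text


if __name__ == '__main__':
    # build the outgoing search URL the engine would fetch
    params = request('free software', {'safesearch': 1})
    print(params['url'])

    # parse a hand-written snippet shaped like the markup the XPaths expect
    sample = (
        '<table class="result"><tr><td class="record">'
        '<a href="https://example.org">Example result</a>'
        '<div class="text">A short description.</div>'
        '</td></tr></table>'
    )
    for item in response(FakeResponse(sample)):
        print(item['title'], '->', item['url'])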