json_engine.py

try:
    from collections.abc import Iterable  # Python >= 3.3 (required on 3.10+)
except ImportError:
    from collections import Iterable  # Python 2
from json import loads
from sys import version_info
from searx.url_utils import urlencode

if version_info[0] == 3:
    unicode = str
search_url = None
url_query = None
content_query = None
title_query = None
paging = False
suggestion_query = ''
results_query = ''

# parameters for engines with paging support
#
# number of results on each page
# (only needed if the site requires not a page number, but an offset)
page_size = 1
# number of the first page (usually 0 or 1)
first_page_num = 1
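
# Example (hypothetical values): an engine built on this module would typically
# override the attributes above, e.g.
#
#   search_url = 'https://api.example.com/search?q={query}&page={pageno}'
#   url_query = 'items/link'
#   title_query = 'items/name'
#   content_query = 'items/snippet'
#   paging = True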


def iterate(iterable):
    # yield (key, value) pairs for dicts and (index, value) pairs for other
    # containers, with the index converted to a string so both kinds of key
    # can be matched uniformly
    if type(iterable) == dict:
        it = iterable.items()
    else:
        it = enumerate(iterable)
    for index, value in it:
        yield str(index), value


def is_iterable(obj):
    # strings are technically iterable but must be treated as leaf values,
    # not as containers to descend into
    if type(obj) == str:
        return False
    if type(obj) == unicode:
        return False
    return isinstance(obj, Iterable)


def parse(query):
    # split a path expression such as 'a/b/c' into its non-empty segments
    q = []
    for part in query.split('/'):
        if part == '':
            continue
        else:
            q.append(part)
    return q


def do_query(data, q):
    ret = []
    if not q:
        return ret
    qkey = q[0]

    for key, value in iterate(data):
        if len(q) == 1:
            # last path segment: collect matching values, and keep searching
            # nested containers for further matches
            if key == qkey:
                ret.append(value)
            elif is_iterable(value):
                ret.extend(do_query(value, q))
        else:
            if not is_iterable(value):
                continue
            if key == qkey:
                # segment matched: descend with the rest of the path
                ret.extend(do_query(value, q[1:]))
            else:
                # no match: descend with the full path unchanged
                ret.extend(do_query(value, q))
    return ret


def query(data, query_string):
    q = parse(query_string)
    return do_query(data, q)
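
# A minimal illustration of the path syntax (hypothetical data): each '/'
# separated segment names a key to match, lists are walked by index, and
# non-matching containers are still descended into, so
#
#   query({'items': [{'name': 'a'}, {'name': 'b'}]}, 'items/name')
#
# returns ['a', 'b'].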


def request(query, params):
    # urlencode returns 'q=...'; strip the leading 'q=' to keep only the
    # URL-encoded query string
    query = urlencode({'q': query})[2:]

    fp = {'query': query}
    if paging and search_url.find('{pageno}') >= 0:
        fp['pageno'] = (params['pageno'] - 1) * page_size + first_page_num

    params['url'] = search_url.format(**fp)
    params['query'] = query
    return params
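
# Worked example (hypothetical settings): with
#   search_url = 'https://api.example.com/?q={query}&p={pageno}'
#   paging = True, page_size = 10, first_page_num = 1
# page 2 of a search for "foo bar" gives
#   params['url'] == 'https://api.example.com/?q=foo+bar&p=11'
# since (2 - 1) * 10 + 1 = 11.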


def response(resp):
    results = []
    json = loads(resp.text)

    if results_query:
        # a single container holds the result objects; apply the sub-queries
        # to each of them
        for result in query(json, results_query)[0]:
            url = query(result, url_query)[0]
            title = query(result, title_query)[0]
            content = query(result, content_query)[0]
            results.append({'url': url, 'title': title, 'content': content})
    else:
        # urls, titles and contents are queried independently and matched up
        # by position
        for url, title, content in zip(
            query(json, url_query),
            query(json, title_query),
            query(json, content_query)
        ):
            results.append({'url': url, 'title': title, 'content': content})

    if not suggestion_query:
        return results
    for suggestion in query(json, suggestion_query):
        results.append({'suggestion': suggestion})
    return results
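

if __name__ == '__main__':
    # Minimal, self-contained demo of the JSON path queries (hypothetical
    # data, not part of the engine itself).
    sample = {
        'results': [
            {'link': 'https://example.com/1', 'name': 'First', 'snippet': 'one'},
            {'link': 'https://example.com/2', 'name': 'Second', 'snippet': 'two'},
        ]
    }
    print(query(sample, 'results/link'))  # ['https://example.com/1', 'https://example.com/2']
    print(query(sample, 'results/name'))  # ['First', 'Second']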