from __future__ import annotations

import json

import aiohttp
from aiohttp import ClientSession, BaseConnector

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin, BaseConversation
from .helper import format_prompt
from ..requests.aiohttp import get_connector
from ..requests.raise_for_status import raise_for_status
from .. import debug
  11. MODELS = [
  12. {"model":"gpt-4o","modelName":"GPT-4o","modelVariant":None,"modelStyleId":"gpt-4o-mini","createdBy":"OpenAI","moderationLevel":"HIGH","isAvailable":1,"inputCharLimit":16e3,"settingId":"4"},
  13. {"model":"gpt-4o-mini","modelName":"GPT-4o","modelVariant":"mini","modelStyleId":"gpt-4o-mini","createdBy":"OpenAI","moderationLevel":"HIGH","isAvailable":0,"inputCharLimit":16e3,"settingId":"3"},
  14. {"model":"claude-3-5-sonnet-20240620","modelName":"Claude 3.5","modelVariant":"Sonnet","modelStyleId":"claude-3-haiku","createdBy":"Anthropic","moderationLevel":"HIGH","isAvailable":1,"inputCharLimit":16e3,"settingId":"7"},
  15. {"model":"claude-3-opus-20240229","modelName":"Claude 3","modelVariant":"Opus","modelStyleId":"claude-3-haiku","createdBy":"Anthropic","moderationLevel":"HIGH","isAvailable":1,"inputCharLimit":16e3,"settingId":"2"},
  16. {"model":"claude-3-haiku-20240307","modelName":"Claude 3","modelVariant":"Haiku","modelStyleId":"claude-3-haiku","createdBy":"Anthropic","moderationLevel":"HIGH","isAvailable":0,"inputCharLimit":16e3,"settingId":"1"},
  17. {"model":"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo","modelName":"Llama 3.1","modelVariant":"70B","modelStyleId":"llama-3","createdBy":"Meta","moderationLevel":"MEDIUM","isAvailable":0,"isOpenSource":0,"inputCharLimit":16e3,"settingId":"5"},
  18. {"model":"mistralai/Mixtral-8x7B-Instruct-v0.1","modelName":"Mixtral","modelVariant":"8x7B","modelStyleId":"mixtral","createdBy":"Mistral AI","moderationLevel":"LOW","isAvailable":0,"isOpenSource":0,"inputCharLimit":16e3,"settingId":"6"},
  19. {"model":"Qwen/Qwen2.5-Coder-32B-Instruct","modelName":"Qwen 2.5 Coder","modelVariant":"32B","modelStyleId":"qwen","createdBy":"Alibaba Cloud","moderationLevel":"LOW","isAvailable":0,"isOpenSource":1,"inputCharLimit":16e3,"settingId":"90"}
  20. ]
  21. class Conversation(BaseConversation):
  22. vqd: str = None
  23. message_history: Messages = []
  24. def __init__(self, model: str):
  25. self.model = model
  26. class DDG(AsyncGeneratorProvider, ProviderModelMixin):
  27. label = "DuckDuckGo AI Chat"
  28. url = "https://duckduckgo.com/aichat"
  29. api_endpoint = "https://duckduckgo.com/duckchat/v1/chat"
  30. working = True
  31. supports_stream = True
  32. supports_system_message = True
  33. supports_message_history = True
  34. default_model = "gpt-4o-mini"
  35. models = [model.get("model") for model in MODELS]
  36. model_aliases = {
  37. "claude-3-haiku": "claude-3-haiku-20240307",
  38. "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
  39. "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
  40. "gpt-4": "gpt-4o-mini",
  41. }
  42. @classmethod
  43. async def get_vqd(cls, proxy: str, connector: BaseConnector = None):
  44. status_url = "https://duckduckgo.com/duckchat/v1/status"
  45. headers = {
  46. 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36',
  47. 'Accept': 'text/event-stream',
  48. 'x-vqd-accept': '1'
  49. }
  50. async with aiohttp.ClientSession(connector=get_connector(connector, proxy)) as session:
  51. async with session.get(status_url, headers=headers) as response:
  52. await raise_for_status(response)
  53. return response.headers.get("x-vqd-4")
  54. @classmethod
  55. async def create_async_generator(
  56. cls,
  57. model: str,
  58. messages: Messages,
  59. conversation: Conversation = None,
  60. return_conversation: bool = False,
  61. proxy: str = None,
  62. connector: BaseConnector = None,
  63. **kwargs
  64. ) -> AsyncResult:
  65. model = cls.get_model(model)
  66. is_new_conversation = False
  67. if conversation is None:
  68. conversation = Conversation(model)
  69. is_new_conversation = True
  70. if conversation.vqd is None:
  71. conversation.vqd = await cls.get_vqd(proxy, connector)
  72. if not conversation.vqd:
  73. raise Exception("Failed to obtain VQD token")
  74. headers = {
  75. 'accept': 'text/event-stream',
  76. 'content-type': 'application/json',
  77. 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36',
  78. 'x-vqd-4': conversation.vqd,
  79. }
  80. async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
  81. if is_new_conversation:
  82. conversation.message_history = [{"role": "user", "content": format_prompt(messages)}]
  83. else:
  84. if len(messages) >= 2:
  85. conversation.message_history = [
  86. *conversation.message_history,
  87. messages[-2],
  88. messages[-1]
  89. ]
  90. elif len(messages) == 1:
  91. conversation.message_history = [
  92. *conversation.message_history,
  93. messages[-1]
  94. ]
  95. if return_conversation:
  96. yield conversation
  97. data = {
  98. "model": conversation.model,
  99. "messages": conversation.message_history
  100. }
  101. async with session.post(cls.api_endpoint, json=data) as response:
  102. conversation.vqd = response.headers.get("x-vqd-4")
  103. await raise_for_status(response)
  104. async for line in response.content:
  105. if line:
  106. decoded_line = line.decode('utf-8')
  107. if decoded_line.startswith('data: '):
  108. json_str = decoded_line[6:]
  109. if json_str == '[DONE]':
  110. break
  111. try:
  112. json_data = json.loads(json_str)
  113. if 'message' in json_data:
  114. yield json_data['message']
  115. except json.JSONDecodeError:
  116. pass