# DarkAI.py — async streaming chat provider for darkai.foundation
  1. from __future__ import annotations
  2. import json
  3. from aiohttp import ClientSession
  4. from ..typing import AsyncResult, Messages
  5. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  6. from .helper import format_prompt
  7. class DarkAI(AsyncGeneratorProvider, ProviderModelMixin):
  8. url = "https://darkai.foundation/chat"
  9. api_endpoint = "https://darkai.foundation/chat"
  10. working = True
  11. supports_stream = True
  12. supports_system_message = True
  13. supports_message_history = True
  14. default_model = 'llama-3-70b'
  15. models = [
  16. 'gpt-4o', # Uncensored
  17. 'gpt-3.5-turbo', # Uncensored
  18. default_model,
  19. ]
  20. model_aliases = {
  21. "llama-3.1-70b": "llama-3-70b",
  22. }
  23. @classmethod
  24. def get_model(cls, model: str) -> str:
  25. if model in cls.models:
  26. return model
  27. elif model in cls.model_aliases:
  28. return cls.model_aliases[model]
  29. else:
  30. return cls.default_model
  31. @classmethod
  32. async def create_async_generator(
  33. cls,
  34. model: str,
  35. messages: Messages,
  36. proxy: str = None,
  37. **kwargs
  38. ) -> AsyncResult:
  39. model = cls.get_model(model)
  40. headers = {
  41. "accept": "text/event-stream",
  42. "content-type": "application/json",
  43. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127.0.0.0 Safari/537.36"
  44. }
  45. async with ClientSession(headers=headers) as session:
  46. prompt = format_prompt(messages)
  47. data = {
  48. "query": prompt,
  49. "model": model,
  50. }
  51. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  52. response.raise_for_status()
  53. full_text = ""
  54. async for chunk in response.content:
  55. if chunk:
  56. try:
  57. chunk_str = chunk.decode().strip()
  58. if chunk_str.startswith('data: '):
  59. chunk_data = json.loads(chunk_str[6:])
  60. if chunk_data['event'] == 'text-chunk':
  61. full_text += chunk_data['data']['text']
  62. elif chunk_data['event'] == 'stream-end':
  63. if full_text:
  64. yield full_text.strip()
  65. return
  66. except json.JSONDecodeError:
  67. pass
  68. except Exception:
  69. pass
  70. if full_text:
  71. yield full_text.strip()