# GizAI.py — GizAI assistant provider (https://app.giz.ai)
from __future__ import annotations

from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt
  6. class GizAI(AsyncGeneratorProvider, ProviderModelMixin):
  7. url = "https://app.giz.ai/assistant"
  8. api_endpoint = "https://app.giz.ai/api/data/users/inferenceServer.infer"
  9. working = True
  10. supports_stream = False
  11. supports_system_message = True
  12. supports_message_history = True
  13. default_model = 'chat-gemini-flash'
  14. models = [default_model]
  15. model_aliases = {"gemini-1.5-flash": "chat-gemini-flash",}
  16. @classmethod
  17. def get_model(cls, model: str) -> str:
  18. if model in cls.models:
  19. return model
  20. elif model in cls.model_aliases:
  21. return cls.model_aliases[model]
  22. else:
  23. return cls.default_model
  24. @classmethod
  25. async def create_async_generator(
  26. cls,
  27. model: str,
  28. messages: Messages,
  29. proxy: str = None,
  30. **kwargs
  31. ) -> AsyncResult:
  32. model = cls.get_model(model)
  33. headers = {
  34. 'Accept': 'application/json, text/plain, */*',
  35. 'Accept-Language': 'en-US,en;q=0.9',
  36. 'Cache-Control': 'no-cache',
  37. 'Connection': 'keep-alive',
  38. 'Content-Type': 'application/json',
  39. 'DNT': '1',
  40. 'Origin': 'https://app.giz.ai',
  41. 'Pragma': 'no-cache',
  42. 'Sec-Fetch-Dest': 'empty',
  43. 'Sec-Fetch-Mode': 'cors',
  44. 'Sec-Fetch-Site': 'same-origin',
  45. 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36',
  46. 'sec-ch-ua': '"Not?A_Brand";v="99", "Chromium";v="130"',
  47. 'sec-ch-ua-mobile': '?0',
  48. 'sec-ch-ua-platform': '"Linux"'
  49. }
  50. async with ClientSession(headers=headers) as session:
  51. data = {
  52. "model": model,
  53. "input": {
  54. "messages": [
  55. {"content": message.get("content")}
  56. if message.get("role") == "system" else
  57. {"type": "human" if message.get("role") == "user" else "ai", "content": message.get("content")}
  58. for message in messages
  59. ],
  60. "mode": "plan"
  61. },
  62. "noStream": True
  63. }
  64. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  65. if response.status == 201:
  66. result = await response.json()
  67. yield result['output'].strip()
  68. else:
  69. raise Exception(f"Unexpected response status: {response.status}\n{await response.text()}")