FlowGpt.py

from __future__ import annotations

import json
import time
import hashlib

from aiohttp import ClientSession

from ...typing import AsyncResult, Messages
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..helper import get_random_hex, get_random_string
from ...requests.raise_for_status import raise_for_status

class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
    """Streaming provider for FlowGPT's anonymous chat endpoint."""
    url = "https://flowgpt.com/chat"
    working = False
    supports_message_history = True
    supports_system_message = True
    default_model = "gpt-3.5-turbo"
    models = [
        "gpt-3.5-turbo",
        "gpt-3.5-long",
        "gpt-4-turbo",
        "google-gemini",
        "claude-instant",
        "claude-v1",
        "claude-v2",
        "llama2-13b",
        "mythalion-13b",
        "pygmalion-13b",
        "chronos-hermes-13b",
        "Mixtral-8x7B",
        "Dolphin-2.6-8x7B",
    ]
    model_aliases = {
        "gemini": "google-gemini",
        "gemini-pro": "google-gemini"
    }

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        temperature: float = 0.7,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
        timestamp = str(int(time.time()))
        auth = "Bearer null"
        nonce = get_random_hex()
        # The anonymous endpoint expects an MD5 signature over "<timestamp>-<nonce>-<auth>".
        data = f"{timestamp}-{nonce}-{auth}"
        signature = hashlib.md5(data.encode()).hexdigest()
        headers = {
            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:122.0) Gecko/20100101 Firefox/122.0",
            "Accept": "*/*",
            "Accept-Language": "en-US;q=0.7,en;q=0.3",
            "Accept-Encoding": "gzip, deflate, br",
            "Referer": "https://flowgpt.com/",
            "Content-Type": "application/json",
            "Authorization": auth,
            "Origin": "https://flowgpt.com",
            "Connection": "keep-alive",
            "Sec-Fetch-Dest": "empty",
            "Sec-Fetch-Mode": "cors",
            "Sec-Fetch-Site": "same-site",
            "TE": "trailers",
            "x-flow-device-id": f"f-{get_random_string(19)}",
            "x-nonce": nonce,
            "x-signature": signature,
            "x-timestamp": timestamp
        }
        async with ClientSession(headers=headers) as session:
            # Everything except the last message becomes chat history;
            # system messages are merged into a single system prompt.
            history = [message for message in messages[:-1] if message["role"] != "system"]
            system_message = "\n".join([message["content"] for message in messages if message["role"] == "system"])
            if not system_message:
                system_message = "You are a helpful assistant. Follow the user's instructions carefully."
            data = {
                "model": model,
                "nsfw": False,
                "question": messages[-1]["content"],
                "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *history],
                "system": system_message,
                "temperature": temperature,
                "promptId": f"model-{model}",
                "documentIds": [],
                "chatFileDocumentIds": [],
                "generateImage": False,
                "generateAudio": False
            }
            async with session.post("https://prod-backend-k8s.flowgpt.com/v3/chat-anonymous", json=data, proxy=proxy) as response:
                await raise_for_status(response)
                # The response streams newline-delimited JSON events;
                # only "text" events carry generated content.
                async for chunk in response.content:
                    if chunk.strip():
                        message = json.loads(chunk)
                        if "event" not in message:
                            continue
                        if message["event"] == "text":
                            yield message["data"]
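
For context, a minimal usage sketch follows. It assumes this file lives in a g4f-style provider package so that FlowGpt can be imported directly; the import path shown is an assumption, and the provider is flagged `working = False` above, so the remote endpoint may no longer respond.

import asyncio

from g4f.Provider import FlowGpt  # assumed import path; adjust to your package layout

async def main():
    # create_async_generator is an async generator: iterate it to stream the reply.
    async for chunk in FlowGpt.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello!"}],
    ):
        print(chunk, end="", flush=True)

asyncio.run(main())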