GPROChat.py

from __future__ import annotations

import time
import hashlib
from aiohttp import ClientSession

from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt


class GPROChat(AsyncGeneratorProvider, ProviderModelMixin):
    url = "https://gprochat.com"
    api_endpoint = "https://gprochat.com/api/generate"
    working = True
    supports_stream = True
    supports_message_history = True
    default_model = 'gemini-1.5-pro'

    @staticmethod
    def generate_signature(timestamp: int, message: str) -> str:
        # The API authenticates each request with a SHA-256 digest of
        # "<timestamp>:<message>:<secret_key>".
        secret_key = "2BC120D4-BB36-1B60-26DE-DB630472A3D8"
        hash_input = f"{timestamp}:{message}:{secret_key}"
        signature = hashlib.sha256(hash_input.encode('utf-8')).hexdigest()
        return signature

    @classmethod
    def get_model(cls, model: str) -> str:
        # Resolve the requested model name, falling back to the default model
        # when it is neither a known model nor a registered alias.
        if model in cls.models:
            return model
        elif model in cls.model_aliases:
            return cls.model_aliases[model]
        else:
            return cls.default_model

    @classmethod
    async def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        proxy: str = None,
        **kwargs
    ) -> AsyncResult:
        model = cls.get_model(model)
        timestamp = int(time.time() * 1000)
        prompt = format_prompt(messages)
        sign = cls.generate_signature(timestamp, prompt)

        headers = {
            "accept": "*/*",
            "origin": cls.url,
            "referer": f"{cls.url}/",
            "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36",
            "content-type": "text/plain;charset=UTF-8"
        }

        data = {
            "messages": [{"role": "user", "parts": [{"text": prompt}]}],
            "time": timestamp,
            "pass": None,
            "sign": sign
        }

        # Stream the response body and yield decoded chunks as they arrive.
        async with ClientSession(headers=headers) as session:
            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
                response.raise_for_status()
                async for chunk in response.content.iter_any():
                    if chunk:
                        yield chunk.decode()
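
# ---------------------------------------------------------------------------
# Example usage (illustrative sketch, not part of the provider itself).
# Because of the relative imports above, this module has to be imported as
# part of its provider package; the snippet below assumes that context and
# simply consumes the async generator defined by the class:
#
#     import asyncio
#
#     async def demo() -> None:
#         messages = [{"role": "user", "content": "Hello, how are you?"}]
#         async for chunk in GPROChat.create_async_generator("gemini-1.5-pro", messages):
#             print(chunk, end="", flush=True)
#
#     asyncio.run(demo())
# ---------------------------------------------------------------------------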