# ChatGLM.py
  1. from __future__ import annotations
  2. import uuid
  3. import json
  4. from aiohttp import ClientSession
  5. from ..typing import AsyncResult, Messages
  6. from ..requests.raise_for_status import raise_for_status
  7. from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
  8. class ChatGLM(AsyncGeneratorProvider, ProviderModelMixin):
  9. url = "https://chatglm.cn"
  10. api_endpoint = "https://chatglm.cn/chatglm/mainchat-api/guest/stream"
  11. working = True
  12. supports_stream = True
  13. supports_system_message = False
  14. supports_message_history = False
  15. default_model = "all-tools-230b"
  16. models = [default_model]
  17. model_aliases = {"glm-4": default_model}
  18. @classmethod
  19. async def create_async_generator(
  20. cls,
  21. model: str,
  22. messages: Messages,
  23. proxy: str = None,
  24. **kwargs
  25. ) -> AsyncResult:
  26. device_id = str(uuid.uuid4()).replace('-', '')
  27. headers = {
  28. 'Accept-Language': 'en-US,en;q=0.9',
  29. 'App-Name': 'chatglm',
  30. 'Authorization': 'undefined',
  31. 'Content-Type': 'application/json',
  32. 'Origin': 'https://chatglm.cn',
  33. 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36',
  34. 'X-App-Platform': 'pc',
  35. 'X-App-Version': '0.0.1',
  36. 'X-Device-Id': device_id,
  37. 'Accept': 'text/event-stream'
  38. }
  39. async with ClientSession(headers=headers) as session:
  40. data = {
  41. "assistant_id": "65940acff94777010aa6b796",
  42. "conversation_id": "",
  43. "meta_data": {
  44. "if_plus_model": False,
  45. "is_test": False,
  46. "input_question_type": "xxxx",
  47. "channel": "",
  48. "draft_id": "",
  49. "quote_log_id": "",
  50. "platform": "pc"
  51. },
  52. "messages": [
  53. {
  54. "role": message["role"],
  55. "content": [
  56. {
  57. "type": "text",
  58. "text": message["content"]
  59. }
  60. ]
  61. }
  62. for message in messages
  63. ]
  64. }
  65. yield_text = 0
  66. async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
  67. await raise_for_status(response)
  68. async for chunk in response.content:
  69. if chunk:
  70. decoded_chunk = chunk.decode('utf-8')
  71. if decoded_chunk.startswith('data: '):
  72. try:
  73. json_data = json.loads(decoded_chunk[6:])
  74. parts = json_data.get('parts', [])
  75. if parts:
  76. content = parts[0].get('content', [])
  77. if content:
  78. text = content[0].get('text', '')[yield_text:]
  79. if text:
  80. yield text
  81. yield_text += len(text)
  82. except json.JSONDecodeError:
  83. pass