# Liaobots.py
from __future__ import annotations

import json
import uuid

from aiohttp import BaseConnector, ClientSession

from ..requests import raise_for_status
from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import get_connector
  9. models = {
  10. "claude-3-5-sonnet-20241022": {
  11. "id": "claude-3-5-sonnet-20241022",
  12. "name": "Claude-3.5-Sonnet-V2",
  13. "model": "Claude",
  14. "provider": "Anthropic",
  15. "maxLength": 800000,
  16. "tokenLimit": 200000,
  17. "context": "200K",
  18. },
  19. "claude-3-5-sonnet-20241022-t": {
  20. "id": "claude-3-5-sonnet-20241022-t",
  21. "name": "Claude-3.5-Sonnet-V2-T",
  22. "model": "Claude",
  23. "provider": "Anthropic",
  24. "maxLength": 800000,
  25. "tokenLimit": 200000,
  26. "context": "200K",
  27. },
  28. "claude-3-7-sonnet-20250219": {
  29. "id": "claude-3-7-sonnet-20250219",
  30. "name": "Claude-3.7-Sonnet",
  31. "model": "Claude",
  32. "provider": "Anthropic",
  33. "maxLength": 800000,
  34. "tokenLimit": 200000,
  35. "context": "200K",
  36. },
  37. "claude-3-7-sonnet-20250219-t": {
  38. "id": "claude-3-7-sonnet-20250219-t",
  39. "name": "Claude-3.7-Sonnet-T",
  40. "model": "Claude",
  41. "provider": "Anthropic",
  42. "maxLength": 800000,
  43. "tokenLimit": 200000,
  44. "context": "200K",
  45. },
  46. "claude-3-7-sonnet-20250219-thinking": {
  47. "id": "claude-3-7-sonnet-20250219-thinking",
  48. "name": "Claude-3.7-Sonnet-Thinking",
  49. "model": "Claude",
  50. "provider": "Anthropic",
  51. "maxLength": 800000,
  52. "tokenLimit": 200000,
  53. "context": "200K",
  54. },
  55. "claude-3-opus-20240229": {
  56. "id": "claude-3-opus-20240229",
  57. "name": "Claude-3-Opus",
  58. "model": "Claude",
  59. "provider": "Anthropic",
  60. "maxLength": 800000,
  61. "tokenLimit": 200000,
  62. "context": "200K",
  63. },
  64. "claude-3-sonnet-20240229": {
  65. "id": "claude-3-sonnet-20240229",
  66. "name": "Claude-3-Sonnet",
  67. "model": "Claude",
  68. "provider": "Anthropic",
  69. "maxLength": 800000,
  70. "tokenLimit": 200000,
  71. "context": "200K",
  72. },
  73. "deepseek-r1": {
  74. "id": "deepseek-r1",
  75. "name": "DeepSeek-R1",
  76. "model": "DeepSeek-R1",
  77. "provider": "DeepSeek",
  78. "maxLength": 400000,
  79. "tokenLimit": 100000,
  80. "context": "128K",
  81. },
  82. "deepseek-r1-distill-llama-70b": {
  83. "id": "deepseek-r1-distill-llama-70b",
  84. "name": "DeepSeek-R1-70B",
  85. "model": "DeepSeek-R1-70B",
  86. "provider": "DeepSeek",
  87. "maxLength": 400000,
  88. "tokenLimit": 100000,
  89. "context": "128K",
  90. },
  91. "deepseek-v3": {
  92. "id": "deepseek-v3",
  93. "name": "DeepSeek-V3",
  94. "model": "DeepSeek-V3",
  95. "provider": "DeepSeek",
  96. "maxLength": 400000,
  97. "tokenLimit": 100000,
  98. "context": "128K",
  99. },
  100. "gemini-2.0-flash": {
  101. "id": "gemini-2.0-flash",
  102. "name": "Gemini-2.0-Flash",
  103. "model": "Gemini",
  104. "provider": "Google",
  105. "maxLength": 4000000,
  106. "tokenLimit": 1000000,
  107. "context": "1024K",
  108. },
  109. "gemini-2.0-flash-thinking-exp": {
  110. "id": "gemini-2.0-flash-thinking-exp",
  111. "name": "Gemini-2.0-Flash-Thinking-Exp",
  112. "model": "Gemini",
  113. "provider": "Google",
  114. "maxLength": 4000000,
  115. "tokenLimit": 1000000,
  116. "context": "1024K",
  117. },
  118. "gemini-2.0-pro-exp": {
  119. "id": "gemini-2.0-pro-exp",
  120. "name": "Gemini-2.0-Pro-Exp",
  121. "model": "Gemini",
  122. "provider": "Google",
  123. "maxLength": 4000000,
  124. "tokenLimit": 1000000,
  125. "context": "1024K",
  126. },
  127. "gpt-4o-2024-08-06": {
  128. "id": "gpt-4o-2024-08-06",
  129. "name": "GPT-4o",
  130. "model": "ChatGPT",
  131. "provider": "OpenAI",
  132. "maxLength": 260000,
  133. "tokenLimit": 126000,
  134. "context": "128K",
  135. },
  136. "gpt-4o-mini-2024-07-18": {
  137. "id": "gpt-4o-mini-2024-07-18",
  138. "name": "GPT-4o-Mini",
  139. "model": "ChatGPT",
  140. "provider": "OpenAI",
  141. "maxLength": 260000,
  142. "tokenLimit": 126000,
  143. "context": "128K",
  144. },
  145. "gpt-4o-mini-free": {
  146. "id": "gpt-4o-mini-free",
  147. "name": "GPT-4o-Mini-Free",
  148. "model": "ChatGPT",
  149. "provider": "OpenAI",
  150. "maxLength": 31200,
  151. "tokenLimit": 7800,
  152. "context": "8K",
  153. },
  154. "grok-3": {
  155. "id": "grok-3",
  156. "name": "Grok-3",
  157. "model": "Grok",
  158. "provider": "x.ai",
  159. "maxLength": 800000,
  160. "tokenLimit": 200000,
  161. "context": "200K",
  162. },
  163. "grok-3-r1": {
  164. "id": "grok-3-r1",
  165. "name": "Grok-3-Thinking",
  166. "model": "Grok",
  167. "provider": "x.ai",
  168. "maxLength": 800000,
  169. "tokenLimit": 200000,
  170. "context": "200K",
  171. },
  172. "o3-mini": {
  173. "id": "o3-mini",
  174. "name": "o3-mini",
  175. "model": "o3",
  176. "provider": "OpenAI",
  177. "maxLength": 400000,
  178. "tokenLimit": 100000,
  179. "context": "128K",
  180. },
  181. }
  182. class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
  183. url = "https://liaobots.site"
  184. working = True
  185. supports_message_history = True
  186. supports_system_message = True
  187. default_model = "gpt-4o-2024-08-06"
  188. models = list(models.keys())
  189. model_aliases = {
  190. # Anthropic
  191. "claude-3.5-sonnet": "claude-3-5-sonnet-20241022",
  192. "claude-3.5-sonnet": "claude-3-5-sonnet-20241022-t",
  193. "claude-3.7-sonnet": "claude-3-7-sonnet-20250219",
  194. "claude-3.7-sonnet": "claude-3-7-sonnet-20250219-t",
  195. "claude-3.7-sonnet-thinking": "claude-3-7-sonnet-20250219-thinking",
  196. "claude-3-opus": "claude-3-opus-20240229",
  197. "claude-3-sonnet": "claude-3-sonnet-20240229",
  198. # DeepSeek
  199. "deepseek-r1": "deepseek-r1-distill-llama-70b",
  200. # Google
  201. "gemini-2.0-flash-thinking": "gemini-2.0-flash-thinking-exp",
  202. "gemini-2.0-pro": "gemini-2.0-pro-exp",
  203. # OpenAI
  204. "gpt-4": default_model,
  205. "gpt-4o": default_model,
  206. "gpt-4o-mini": "gpt-4o-mini-2024-07-18",
  207. "gpt-4o-mini": "gpt-4o-mini-free",
  208. }
  209. _auth_code = ""
  210. _cookie_jar = None
  211. @classmethod
  212. def is_supported(cls, model: str) -> bool:
  213. """
  214. Check if the given model is supported.
  215. """
  216. return model in models or model in cls.model_aliases
  217. @classmethod
  218. async def create_async_generator(
  219. cls,
  220. model: str,
  221. messages: Messages,
  222. proxy: str = None,
  223. connector: BaseConnector = None,
  224. **kwargs
  225. ) -> AsyncResult:
  226. model = cls.get_model(model)
  227. headers = {
  228. "referer": "https://liaobots.work/",
  229. "origin": "https://liaobots.work",
  230. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
  231. }
  232. async with ClientSession(
  233. headers=headers,
  234. cookie_jar=cls._cookie_jar,
  235. connector=get_connector(connector, proxy, True)
  236. ) as session:
  237. data = {
  238. "conversationId": str(uuid.uuid4()),
  239. "model": models[model],
  240. "messages": messages,
  241. "key": "",
  242. "prompt": kwargs.get("system_message", "You are a helpful assistant."),
  243. }
  244. if not cls._auth_code:
  245. async with session.post(
  246. "https://liaobots.work/recaptcha/api/login",
  247. data={"token": "abcdefghijklmnopqrst"},
  248. verify_ssl=False
  249. ) as response:
  250. await raise_for_status(response)
  251. try:
  252. async with session.post(
  253. "https://liaobots.work/api/user",
  254. json={"authcode": cls._auth_code},
  255. verify_ssl=False
  256. ) as response:
  257. await raise_for_status(response)
  258. cls._auth_code = (await response.json(content_type=None))["authCode"]
  259. if not cls._auth_code:
  260. raise RuntimeError("Empty auth code")
  261. cls._cookie_jar = session.cookie_jar
  262. async with session.post(
  263. "https://liaobots.work/api/chat",
  264. json=data,
  265. headers={"x-auth-code": cls._auth_code},
  266. verify_ssl=False
  267. ) as response:
  268. await raise_for_status(response)
  269. async for line in response.content:
  270. if line.startswith(b"data: "):
  271. yield json.loads(line[6:]).get("content")
  272. except:
  273. async with session.post(
  274. "https://liaobots.work/api/user",
  275. json={"authcode": "jGDRFOqHcZKAo"},
  276. verify_ssl=False
  277. ) as response:
  278. await raise_for_status(response)
  279. cls._auth_code = (await response.json(content_type=None))["authCode"]
  280. if not cls._auth_code:
  281. raise RuntimeError("Empty auth code")
  282. cls._cookie_jar = session.cookie_jar
  283. async with session.post(
  284. "https://liaobots.work/api/chat",
  285. json=data,
  286. headers={"x-auth-code": cls._auth_code},
  287. verify_ssl=False
  288. ) as response:
  289. await raise_for_status(response)
  290. async for line in response.content:
  291. if line.startswith(b"data: "):
  292. yield json.loads(line[6:]).get("content")
  293. @classmethod
  294. async def initialize_auth_code(cls, session: ClientSession) -> None:
  295. """
  296. Initialize the auth code by making the necessary login requests.
  297. """
  298. async with session.post(
  299. "https://liaobots.work/api/user",
  300. json={"authcode": "pTIQr4FTnVRfr"},
  301. verify_ssl=False
  302. ) as response:
  303. await raise_for_status(response)
  304. cls._auth_code = (await response.json(content_type=None))["authCode"]
  305. if not cls._auth_code:
  306. raise RuntimeError("Empty auth code")
  307. cls._cookie_jar = session.cookie_jar
  308. @classmethod
  309. async def ensure_auth_code(cls, session: ClientSession) -> None:
  310. """
  311. Ensure the auth code is initialized, and if not, perform the initialization.
  312. """
  313. if not cls._auth_code:
  314. await cls.initialize_auth_code(session)