# ThebApi.py
  1. from __future__ import annotations
  2. import requests
  3. from ...typing import Any, CreateResult, Messages
  4. from ..base_provider import AbstractProvider
  5. models = {
  6. "theb-ai": "TheB.AI",
  7. "gpt-3.5-turbo": "GPT-3.5",
  8. "gpt-3.5-turbo-16k": "GPT-3.5-16K",
  9. "gpt-4-turbo": "GPT-4 Turbo",
  10. "gpt-4": "GPT-4",
  11. "gpt-4-32k": "GPT-4 32K",
  12. "claude-2": "Claude 2",
  13. "claude-1": "Claude",
  14. "claude-1-100k": "Claude 100K",
  15. "claude-instant-1": "Claude Instant",
  16. "claude-instant-1-100k": "Claude Instant 100K",
  17. "palm-2": "PaLM 2",
  18. "palm-2-codey": "Codey",
  19. "vicuna-13b-v1.5": "Vicuna v1.5 13B",
  20. "llama-2-7b-chat": "Llama 2 7B",
  21. "llama-2-13b-chat": "Llama 2 13B",
  22. "llama-2-70b-chat": "Llama 2 70B",
  23. "code-llama-7b": "Code Llama 7B",
  24. "code-llama-13b": "Code Llama 13B",
  25. "code-llama-34b": "Code Llama 34B",
  26. "qwen-7b-chat": "Qwen 7B"
  27. }
  28. class ThebApi(AbstractProvider):
  29. url = "https://theb.ai"
  30. working = True
  31. needs_auth = True
  32. @staticmethod
  33. def create_completion(
  34. model: str,
  35. messages: Messages,
  36. stream: bool,
  37. auth: str,
  38. proxy: str = None,
  39. **kwargs
  40. ) -> CreateResult:
  41. if model and model not in models:
  42. raise ValueError(f"Model are not supported: {model}")
  43. headers = {
  44. 'accept': 'application/json',
  45. 'authorization': f'Bearer {auth}',
  46. 'content-type': 'application/json',
  47. }
  48. # response = requests.get("https://api.baizhi.ai/v1/models", headers=headers).json()["data"]
  49. # models = dict([(m["id"], m["name"]) for m in response])
  50. # print(json.dumps(models, indent=4))
  51. data: dict[str, Any] = {
  52. "model": model if model else "gpt-3.5-turbo",
  53. "messages": messages,
  54. "stream": False,
  55. "model_params": {
  56. "system_prompt": kwargs.get("system_message", "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture."),
  57. "temperature": 1,
  58. "top_p": 1,
  59. **kwargs
  60. }
  61. }
  62. response = requests.post(
  63. "https://api.theb.ai/v1/chat/completions",
  64. headers=headers,
  65. json=data,
  66. proxies={"https": proxy}
  67. )
  68. try:
  69. response.raise_for_status()
  70. yield response.json()["choices"][0]["message"]["content"]
  71. except:
  72. raise RuntimeError(f"Response: {next(response.iter_lines()).decode()}")