TypeGPT.py

from __future__ import annotations

import requests

from .template import OpenaiTemplate


class TypeGPT(OpenaiTemplate):
    label = "TypeGpt"
    url = "https://chat.typegpt.net"
    api_base = "https://chat.typegpt.net/api/openai/v1"
    working = True

    # Browser-like request headers expected by the chat.typegpt.net frontend.
    headers = {
        "accept": "application/json, text/event-stream",
        "accept-language": "de,en-US;q=0.9,en;q=0.8",
        "content-type": "application/json",
        "priority": "u=1, i",
        "sec-ch-ua": "\"Not(A:Brand\";v=\"99\", \"Google Chrome\";v=\"133\", \"Chromium\";v=\"133\"",
        "sec-ch-ua-mobile": "?0",
        "sec-ch-ua-platform": "\"Linux\"",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "referer": "https://chat.typegpt.net/",
    }

    default_model = 'gpt-4o-mini-2024-07-18'
    default_vision_model = default_model
    vision_models = ['gpt-3.5-turbo', 'gpt-3.5-turbo-202201', default_vision_model, "o3-mini"]
    fallback_models = vision_models + ["deepseek-r1", "deepseek-v3", "evil"]
    image_models = ["Image-Generator"]
    model_aliases = {
        "gpt-4o-mini": "gpt-4o-mini-2024-07-18",
        "evil": "uncensored-r1",
    }

    @classmethod
    def get_models(cls, **kwargs):
        # Fetch the model list once and cache it on the class.
        if not cls.models:
            # The site's /api/config endpoint exposes a comma-separated "customModels" string.
            cls.models = requests.get(f"{cls.url}/api/config").json()["customModels"].split(",")
            # Keep only entries flagged with a leading "+", stripping the flag and any "@provider" suffix.
            cls.models = [
                model.split("@")[0][1:]
                for model in cls.models
                if model.startswith("+") and model not in cls.image_models
            ]
        return cls.models
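
For reference, a minimal usage sketch: the model list is fetched once via get_models() and cached on the class, after which the cached list is returned. The import path below is an assumption (it depends on where this file lives in the package), not something confirmed by the file itself.

# Usage sketch -- the import path is an assumption.
from g4f.Provider.TypeGPT import TypeGPT

models = TypeGPT.get_models()    # first call fetches {url}/api/config and caches the result
print(TypeGPT.default_model)     # 'gpt-4o-mini-2024-07-18'
print(models)                    # subsequent calls reuse the cached list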