12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023 |
- from __future__ import annotations
- from dataclasses import dataclass
- from typing import Dict, List, Optional
- from .Provider import IterListProvider, ProviderType
- from .Provider import (
- ### No Auth Required ###
- Blackbox,
- Chatai,
- Cloudflare,
- Copilot,
- DeepInfraChat,
- Free2GPT,
- FreeGpt,
- HuggingSpace,
- Grok,
- DeepseekAI_JanusPro7b,
- DeepSeekAPI,
- ImageLabs,
- LambdaChat,
- OIVSCodeSer2,
- OIVSCodeSer0501,
- OpenAIFM,
- PerplexityLabs,
- PollinationsAI,
- PollinationsImage,
- TeachAnything,
- Together,
- Websim,
- WeWordle,
- Yqcloud,
-
- ### Needs Auth ###
- BingCreateImages,
- CopilotAccount,
- Gemini,
- GeminiPro,
- HuggingChat,
- HuggingFace,
- HuggingFaceMedia,
- HuggingFaceAPI,
- MetaAI,
- MicrosoftDesigner,
- OpenaiAccount,
- OpenaiChat,
- )
class ModelRegistry:
    """Simplified registry for automatic model discovery.

    Class-level dictionaries map canonical model names to ``Model``
    instances and alias strings to canonical names. Models add themselves
    here from ``Model.__post_init__``, so the registry fills up as a side
    effect of importing this module.
    """

    # canonical name -> Model instance
    _models: Dict[str, 'Model'] = {}
    # alias -> canonical model name
    _aliases: Dict[str, str] = {}

    @classmethod
    def register(cls, model: 'Model', aliases: Optional[List[str]] = None) -> None:
        """Register a model and optional aliases.

        Models with an empty name (placeholder defaults) are not stored.
        """
        if model.name:
            cls._models[model.name] = model
        if aliases:
            for alias in aliases:
                cls._aliases[alias] = model.name

    @classmethod
    def get(cls, name: str) -> Optional['Model']:
        """Get a model by canonical name or registered alias; None if unknown."""
        if name in cls._models:
            return cls._models[name]
        if name in cls._aliases:
            return cls._models[cls._aliases[name]]
        return None

    @classmethod
    def all_models(cls) -> Dict[str, 'Model']:
        """Return a shallow copy of all registered models."""
        return cls._models.copy()

    @classmethod
    def clear(cls) -> None:
        """Clear registry (for testing)."""
        cls._models.clear()
        cls._aliases.clear()

    @classmethod
    def list_models_by_provider(cls, provider_name: str) -> List[str]:
        """List names of models whose best_provider's repr mentions provider_name.

        NOTE(review): substring match on ``str(best_provider)`` is fuzzy —
        e.g. "HuggingFace" also matches "HuggingFaceAPI"; confirm callers
        accept that before tightening.
        """
        return [name for name, model in cls._models.items()
                if provider_name in str(model.best_provider)]

    @classmethod
    def validate_all_models(cls) -> Dict[str, List[str]]:
        """Validate all models and return a mapping of name -> list of issues.

        Models without problems are omitted from the result.
        """
        issues = {}
        for name, model in cls._models.items():
            model_issues = []
            if not model.name:
                model_issues.append("Empty name")
            if not model.base_provider:
                model_issues.append("Empty base_provider")
            if model.best_provider is None:
                model_issues.append("No best_provider")
            if model_issues:
                issues[name] = model_issues
        return issues
@dataclass(unsafe_hash=True)
class Model:
    """
    Represents a machine learning model configuration.

    Attributes:
        name (str): Name of the model.
        base_provider (str): Default provider for the model.
        best_provider (ProviderType): The preferred provider for the model,
            typically with retry logic. May be None for unconfigured models.
    """
    name: str
    base_provider: str
    best_provider: ProviderType = None

    def __post_init__(self):
        """Auto-register model after initialization."""
        # Unnamed placeholder models (e.g. `default`) are intentionally skipped.
        if self.name:
            ModelRegistry.register(self)

    @staticmethod
    def __all__() -> list[str]:
        """Returns a list of all model names."""
        return list(ModelRegistry.all_models().keys())
class ImageModel(Model):
    """Marker subclass tagging image-generation models; adds no behavior."""
    pass
class AudioModel(Model):
    """Marker subclass tagging audio models; adds no behavior."""
    pass
-
class VideoModel(Model):
    """Marker subclass tagging video models; adds no behavior."""
    pass
-
class VisionModel(Model):
    """Marker subclass tagging vision-capable models; adds no behavior."""
    pass
### Default ###
# Fallback text model: empty name keeps it out of the registry; providers
# are tried in list order by IterListProvider.
default = Model(
    name = "",
    base_provider = "",
    best_provider = IterListProvider([
        OIVSCodeSer0501,
        OIVSCodeSer2,
        Blackbox,
        Copilot,
        DeepInfraChat,
        LambdaChat,
        PollinationsAI,
        Together,
        Free2GPT,
        FreeGpt,
        Chatai,
        WeWordle,
        OpenaiChat,
        Cloudflare,
    ])
)

# Fallback vision model; shuffle=False preserves the listed priority order.
default_vision = VisionModel(
    name = "",
    base_provider = "",
    best_provider = IterListProvider([
        Blackbox,
        DeepInfraChat,
        OIVSCodeSer2,
        OIVSCodeSer0501,
        PollinationsAI,
        Together,
        HuggingSpace,
        GeminiPro,
        HuggingFaceAPI,
        CopilotAccount,
        OpenaiAccount,
        Gemini,
    ], shuffle=False)
)
### OpenAI ###
# gpt-4
gpt_4 = Model(
    name = 'gpt-4',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, PollinationsAI, Copilot, Yqcloud, WeWordle, OpenaiChat])
)

# gpt-4o
gpt_4o = VisionModel(
    name = 'gpt-4o',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, PollinationsAI, OpenaiChat])
)

gpt_4o_mini = Model(
    name = 'gpt-4o-mini',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, OIVSCodeSer2, PollinationsAI, Chatai, OpenaiChat])
)

gpt_4o_mini_audio = AudioModel(
    name = 'gpt-4o-mini-audio',
    base_provider = 'OpenAI',
    best_provider = PollinationsAI
)

gpt_4o_mini_tts = AudioModel(
    name = 'gpt-4o-mini-tts',
    base_provider = 'OpenAI',
    best_provider = OpenAIFM
)

# o1
o1 = Model(
    name = 'o1',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Copilot, OpenaiAccount])
)

o1_mini = Model(
    name = 'o1-mini',
    base_provider = 'OpenAI',
    best_provider = OpenaiAccount
)

# o3
o3_mini = Model(
    name = 'o3-mini',
    base_provider = 'OpenAI',
    best_provider = OpenaiChat
)

o3_mini_high = Model(
    name = 'o3-mini-high',
    base_provider = 'OpenAI',
    best_provider = OpenaiAccount
)

# o4
o4_mini = Model(
    name = 'o4-mini',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([PollinationsAI, OpenaiChat])
)

o4_mini_high = Model(
    name = 'o4-mini-high',
    base_provider = 'OpenAI',
    best_provider = OpenaiChat
)

# gpt-4.1
gpt_4_1 = Model(
    name = 'gpt-4.1',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([PollinationsAI, OpenaiChat])
)

gpt_4_1_mini = Model(
    name = 'gpt-4.1-mini',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, OIVSCodeSer0501, PollinationsAI])
)

gpt_4_1_nano = Model(
    name = 'gpt-4.1-nano',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, PollinationsAI])
)

gpt_4_5 = Model(
    name = 'gpt-4.5',
    base_provider = 'OpenAI',
    best_provider = OpenaiChat
)

# dall-e
dall_e_3 = ImageModel(
    name = 'dall-e-3',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([CopilotAccount, OpenaiAccount, MicrosoftDesigner, BingCreateImages])
)

gpt_image = ImageModel(
    name = 'gpt-image',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([PollinationsImage])
)
### Meta ###
meta = Model(
    name = "meta-ai",
    base_provider = "Meta",
    best_provider = MetaAI
)

# llama-2
llama_2_7b = Model(
    name = "llama-2-7b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Cloudflare])
)

llama_2_70b = Model(
    name = "llama-2-70b",
    base_provider = "Meta Llama",
    best_provider = Together
)

# llama-3
llama_3_8b = Model(
    name = "llama-3-8b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Together, Cloudflare])
)

llama_3_70b = Model(
    name = "llama-3-70b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Together])
)

# llama-3.1
llama_3_1_8b = Model(
    name = "llama-3.1-8b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DeepInfraChat, Together, Cloudflare])
)

llama_3_1_70b = Model(
    name = "llama-3.1-70b",
    base_provider = "Meta Llama",
    best_provider = Together
)

llama_3_1_405b = Model(
    name = "llama-3.1-405b",
    base_provider = "Meta Llama",
    best_provider = Together
)

# llama-3.2
llama_3_2_1b = Model(
    name = "llama-3.2-1b",
    base_provider = "Meta Llama",
    best_provider = Cloudflare
)

llama_3_2_3b = Model(
    name = "llama-3.2-3b",
    base_provider = "Meta Llama",
    best_provider = Together
)

llama_3_2_11b = VisionModel(
    name = "llama-3.2-11b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Together, HuggingChat, HuggingFace])
)

llama_3_2_90b = Model(
    name = "llama-3.2-90b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DeepInfraChat, Together])
)

# llama-3.3
llama_3_3_70b = Model(
    name = "llama-3.3-70b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, PollinationsAI, Together, HuggingChat, HuggingFace])
)

# llama-4
llama_4_scout = Model(
    name = "llama-4-scout",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, PollinationsAI, Together, Cloudflare])
)

llama_4_maverick = Model(
    name = "llama-4-maverick",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, Together])
)
### MistralAI ###
mistral_7b = Model(
    name = "mistral-7b",
    base_provider = "Mistral AI",
    best_provider = Together
)

mixtral_8x7b = Model(
    name = "mixtral-8x7b",
    base_provider = "Mistral AI",
    best_provider = Together
)

mixtral_8x22b = Model(
    name = "mixtral-8x22b",
    base_provider = "Mistral AI",
    best_provider = DeepInfraChat
)

mistral_nemo = Model(
    name = "mistral-nemo",
    base_provider = "Mistral AI",
    best_provider = IterListProvider([HuggingChat, HuggingFace])
)

mistral_small_24b = Model(
    name = "mistral-small-24b",
    base_provider = "Mistral AI",
    best_provider = IterListProvider([DeepInfraChat, Together])
)

mistral_small_3_1_24b = Model(
    name = "mistral-small-3.1-24b",
    base_provider = "Mistral AI",
    best_provider = PollinationsAI
)

### NousResearch ###
# hermes-2
hermes_2_dpo = Model(
    name = "hermes-2-dpo",
    base_provider = "NousResearch",
    best_provider = Together
)

### Microsoft ###
# phi-3.5
phi_3_5_mini = Model(
    name = "phi-3.5-mini",
    base_provider = "Microsoft",
    best_provider = HuggingChat
)

# phi-4
phi_4 = Model(
    name = "phi-4",
    base_provider = "Microsoft",
    best_provider = IterListProvider([DeepInfraChat, PollinationsAI, HuggingSpace])
)

phi_4_multimodal = VisionModel(
    name = "phi-4-multimodal",
    base_provider = "Microsoft",
    best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
)

phi_4_reasoning_plus = Model(
    name = "phi-4-reasoning-plus",
    base_provider = "Microsoft",
    best_provider = DeepInfraChat
)

# wizardlm
wizardlm_2_7b = Model(
    name = 'wizardlm-2-7b',
    base_provider = 'Microsoft',
    best_provider = DeepInfraChat
)

wizardlm_2_8x22b = Model(
    name = 'wizardlm-2-8x22b',
    base_provider = 'Microsoft',
    best_provider = DeepInfraChat
)
### Google DeepMind ###
# gemini
gemini = Model(
    name = 'gemini-2.0',
    base_provider = 'Google',
    best_provider = Gemini
)

# gemini-1.5
gemini_1_5_flash = Model(
    name = 'gemini-1.5-flash',
    base_provider = 'Google',
    best_provider = IterListProvider([Free2GPT, FreeGpt, TeachAnything, Websim])
)

gemini_1_5_pro = Model(
    name = 'gemini-1.5-pro',
    base_provider = 'Google',
    best_provider = IterListProvider([Free2GPT, FreeGpt, TeachAnything, Websim])
)

# gemini-2.0
gemini_2_0_flash = Model(
    name = 'gemini-2.0-flash',
    base_provider = 'Google',
    best_provider = IterListProvider([Gemini, GeminiPro])
)

gemini_2_0_flash_thinking = Model(
    name = 'gemini-2.0-flash-thinking',
    base_provider = 'Google',
    best_provider = IterListProvider([Gemini, GeminiPro])
)

gemini_2_0_flash_thinking_with_apps = Model(
    name = 'gemini-2.0-flash-thinking-with-apps',
    base_provider = 'Google',
    best_provider = Gemini
)

# gemini-2.5
gemini_2_5_flash = Model(
    name = 'gemini-2.5-flash',
    base_provider = 'Google',
    best_provider = IterListProvider([Gemini, GeminiPro])
)

gemini_2_5_pro = Model(
    name = 'gemini-2.5-pro',
    base_provider = 'Google',
    best_provider = IterListProvider([Gemini])
)

# gemma-2
gemma_2_27b = Model(
    name = 'gemma-2-27b',
    base_provider = 'Google',
    best_provider = Together
)

# gemma-3
gemma_3_12b = Model(
    name = 'gemma-3-12b',
    base_provider = 'Google',
    best_provider = DeepInfraChat
)

gemma_3_27b = Model(
    name = 'gemma-3-27b',
    base_provider = 'Google',
    best_provider = DeepInfraChat
)

### Blackbox AI ###
blackboxai = Model(
    name = 'blackboxai',
    base_provider = 'Blackbox AI',
    best_provider = Blackbox
)

### CohereForAI ###
command_r = Model(
    name = 'command-r',
    base_provider = 'CohereForAI',
    best_provider = HuggingSpace
)

command_r_plus = Model(
    name = 'command-r-plus',
    base_provider = 'CohereForAI',
    best_provider = IterListProvider([HuggingSpace, HuggingChat])
)

command_r7b = Model(
    name = 'command-r7b',
    base_provider = 'CohereForAI',
    best_provider = HuggingSpace
)

command_a = Model(
    name = 'command-a',
    base_provider = 'CohereForAI',
    best_provider = HuggingSpace
)
### Qwen ###
# qwen-1.5
qwen_1_5_7b = Model(
    name = 'qwen-1.5-7b',
    base_provider = 'Qwen',
    best_provider = Cloudflare
)

# qwen-2
qwen_2_72b = Model(
    name = 'qwen-2-72b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, HuggingSpace, Together])
)

qwen_2_vl_7b = VisionModel(
    name = "qwen-2-vl-7b",
    base_provider = 'Qwen',
    best_provider = HuggingFaceAPI
)

qwen_2_vl_72b = VisionModel(
    name = "qwen-2-vl-72b",
    base_provider = 'Qwen',
    best_provider = Together
)

# qwen-2.5
qwen_2_5 = Model(
    name = 'qwen-2.5',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

qwen_2_5_7b = Model(
    name = 'qwen-2.5-7b',
    base_provider = 'Qwen',
    best_provider = Together
)

qwen_2_5_72b = Model(
    name = 'qwen-2.5-72b',
    base_provider = 'Qwen',
    best_provider = Together
)

qwen_2_5_coder_32b = Model(
    name = 'qwen-2.5-coder-32b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([PollinationsAI, LambdaChat, Together, HuggingChat])
)

qwen_2_5_1m = Model(
    name = 'qwen-2.5-1m',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

qwen_2_5_max = Model(
    name = 'qwen-2.5-max',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

qwen_2_5_vl_72b = Model(
    name = 'qwen-2.5-vl-72b',
    base_provider = 'Qwen',
    best_provider = Together
)

# qwen-3
qwen_3_235b = Model(
    name = 'qwen-3-235b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, Together, HuggingSpace])
)

qwen_3_32b = Model(
    name = 'qwen-3-32b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, HuggingSpace])
)

qwen_3_30b = Model(
    name = 'qwen-3-30b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
)

qwen_3_14b = Model(
    name = 'qwen-3-14b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, HuggingSpace])
)

qwen_3_4b = Model(
    name = 'qwen-3-4b',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

qwen_3_1_7b = Model(
    name = 'qwen-3-1.7b',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

qwen_3_0_6b = Model(
    name = 'qwen-3-0.6b',
    base_provider = 'Qwen',
    best_provider = HuggingSpace
)

### qwq/qvq ###
qwq_32b = Model(
    name = 'qwq-32b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, Together, HuggingChat])
)
### DeepSeek ###
# deepseek-v3
deepseek_v3 = Model(
    name = 'deepseek-v3',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat, PollinationsAI, Together])
)

# deepseek-r1
deepseek_r1 = Model(
    name = 'deepseek-r1',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, PollinationsAI, Together, HuggingChat, HuggingFace])
)

deepseek_r1_turbo = Model(
    name = 'deepseek-r1-turbo',
    base_provider = 'DeepSeek',
    best_provider = DeepInfraChat
)

deepseek_r1_distill_llama_70b = Model(
    name = 'deepseek-r1-distill-llama-70b',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat, Together])
)

deepseek_r1_distill_qwen_1_5b = Model(
    name = 'deepseek-r1-distill-qwen-1.5b',
    base_provider = 'DeepSeek',
    best_provider = Together
)

deepseek_r1_distill_qwen_14b = Model(
    name = 'deepseek-r1-distill-qwen-14b',
    base_provider = 'DeepSeek',
    best_provider = Together
)

deepseek_r1_distill_qwen_32b = Model(
    name = 'deepseek-r1-distill-qwen-32b',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat])
)

# deepseek-prover-v2
deepseek_prover_v2 = Model(
    name = 'deepseek-prover-v2',
    base_provider = 'DeepSeek',
    best_provider = DeepInfraChat
)

deepseek_prover_v2_671b = Model(
    name = 'deepseek-prover-v2-671b',
    base_provider = 'DeepSeek',
    best_provider = DeepInfraChat
)

# deepseek-v3-0324
deepseek_v3_0324 = Model(
    name = 'deepseek-v3-0324',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat, LambdaChat, PollinationsAI])
)

# deepseek-r1-0528
deepseek_r1_0528 = Model(
    name = 'deepseek-r1-0528',
    base_provider = 'DeepSeek',
    best_provider = IterListProvider([DeepInfraChat, LambdaChat])
)

# janus
# Name comes from the provider's declared default model rather than a literal.
janus_pro_7b = VisionModel(
    name = DeepseekAI_JanusPro7b.default_model,
    base_provider = 'DeepSeek',
    best_provider = DeepseekAI_JanusPro7b
)
### x.ai ###
grok_2 = Model(
    name = 'grok-2',
    base_provider = 'x.ai',
    best_provider = Grok
)

grok_3 = Model(
    name = 'grok-3',
    base_provider = 'x.ai',
    best_provider = Grok
)

grok_3_mini = Model(
    name = 'grok-3-mini',
    base_provider = 'x.ai',
    best_provider = PollinationsAI
)

grok_3_r1 = Model(
    name = 'grok-3-r1',
    base_provider = 'x.ai',
    best_provider = Grok
)

### Perplexity AI ###
sonar = Model(
    name = 'sonar',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

sonar_pro = Model(
    name = 'sonar-pro',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

sonar_reasoning = Model(
    name = 'sonar-reasoning',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

sonar_reasoning_pro = Model(
    name = 'sonar-reasoning-pro',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

r1_1776 = Model(
    name = 'r1-1776',
    base_provider = 'Perplexity AI',
    best_provider = IterListProvider([Together, PerplexityLabs])
)

### Nvidia ###
nemotron_70b = Model(
    name = 'nemotron-70b',
    base_provider = 'Nvidia',
    best_provider = IterListProvider([LambdaChat, Together, HuggingChat, HuggingFace])
)

### Cognitive Computations ###
# dolphin-2
dolphin_2_6 = Model(
    name = "dolphin-2.6",
    base_provider = "Cognitive Computations",
    best_provider = DeepInfraChat
)

dolphin_2_9 = Model(
    name = "dolphin-2.9",
    base_provider = "Cognitive Computations",
    best_provider = DeepInfraChat
)

### DeepInfra ###
airoboros_70b = Model(
    name = "airoboros-70b",
    base_provider = "DeepInfra",
    best_provider = DeepInfraChat
)

### Lizpreciatior ###
lzlv_70b = Model(
    name = "lzlv-70b",
    base_provider = "Lizpreciatior",
    best_provider = DeepInfraChat
)

### Liquid AI ###
lfm_40b = Model(
    name = "lfm-40b",
    base_provider = "Liquid AI",
    best_provider = LambdaChat
)

### Uncensored AI ###
evil = Model(
    name = 'evil',
    base_provider = 'Evil Mode - Experimental',
    best_provider = PollinationsAI
)
### Stability AI ###
sdxl_turbo = ImageModel(
    name = 'sdxl-turbo',
    base_provider = 'Stability AI',
    best_provider = IterListProvider([HuggingFaceMedia, PollinationsImage, ImageLabs])
)

sd_3_5_large = ImageModel(
    name = 'sd-3.5-large',
    base_provider = 'Stability AI',
    best_provider = IterListProvider([HuggingFaceMedia, HuggingSpace])
)

### Black Forest Labs ###
flux = ImageModel(
    name = 'flux',
    base_provider = 'Black Forest Labs',
    best_provider = IterListProvider([HuggingFaceMedia, PollinationsImage, Websim, Together, HuggingSpace])
)

flux_pro = ImageModel(
    name = 'flux-pro',
    base_provider = 'Black Forest Labs',
    best_provider = IterListProvider([PollinationsImage, Together])
)

flux_dev = ImageModel(
    name = 'flux-dev',
    base_provider = 'Black Forest Labs',
    best_provider = IterListProvider([PollinationsImage, HuggingSpace, Together, HuggingChat, HuggingFace])
)

flux_schnell = ImageModel(
    name = 'flux-schnell',
    base_provider = 'Black Forest Labs',
    best_provider = IterListProvider([PollinationsImage, Together, HuggingChat, HuggingFace])
)

flux_redux = ImageModel(
    name = 'flux-redux',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)

flux_depth = ImageModel(
    name = 'flux-depth',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)

flux_canny = ImageModel(
    name = 'flux-canny',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)

flux_kontext_max = ImageModel(
    name = 'flux-kontext-max',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)

flux_dev_lora = ImageModel(
    name = 'flux-dev-lora',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)

flux_kontext_pro = ImageModel(
    name = 'flux-kontext-pro',
    base_provider = 'Black Forest Labs',
    best_provider = Together
)
class ModelUtils:
    """
    Utility class for mapping string identifiers to Model instances.
    Now uses automatic discovery instead of manual mapping.
    """

    # Snapshot of ModelRegistry._models; populated at module load below and
    # refreshable via refresh(). Kept for backward compatibility.
    convert: Dict[str, Model] = {}

    @classmethod
    def refresh(cls) -> None:
        """Refresh the model registry snapshot stored in `convert`."""
        cls.convert = ModelRegistry.all_models()

    @classmethod
    def get_model(cls, name: str) -> Optional[Model]:
        """Get model by name or alias (delegates to ModelRegistry)."""
        return ModelRegistry.get(name)

    @classmethod
    def register_alias(cls, alias: str, model_name: str) -> None:
        """Register an alias for a model in the shared registry."""
        ModelRegistry._aliases[alias] = model_name
# Register special aliases after all models are created
ModelRegistry._aliases["gemini"] = "gemini-2.0"

# Fill the convert dictionary
ModelUtils.convert = ModelRegistry.all_models()

# Demo models configuration
# Maps model name -> [Model instance, [providers enabled in demo mode]]
demo_models = {
    llama_3_2_11b.name: [llama_3_2_11b, [HuggingChat]],
    qwen_2_vl_7b.name: [qwen_2_vl_7b, [HuggingFaceAPI]],
    deepseek_r1.name: [deepseek_r1, [HuggingFace, PollinationsAI]],
    janus_pro_7b.name: [janus_pro_7b, [HuggingSpace]],
    command_r.name: [command_r, [HuggingSpace]],
    command_r_plus.name: [command_r_plus, [HuggingSpace]],
    command_r7b.name: [command_r7b, [HuggingSpace]],
    qwen_2_5_coder_32b.name: [qwen_2_5_coder_32b, [HuggingFace]],
    qwq_32b.name: [qwq_32b, [HuggingFace]],
    llama_3_3_70b.name: [llama_3_3_70b, [HuggingFace]],
    sd_3_5_large.name: [sd_3_5_large, [HuggingSpace, HuggingFace]],
    flux_dev.name: [flux_dev, [PollinationsImage, HuggingFace, HuggingSpace]],
    flux_schnell.name: [flux_schnell, [PollinationsImage, HuggingFace, HuggingSpace]],
}
# Create a list of all models and their providers
def _get_working_providers(model: Model) -> List:
    """Return the providers backing *model* that currently report `working`."""
    provider = model.best_provider
    if provider is None:
        return []
    # An IterListProvider wraps an ordered list of candidates;
    # anything else is treated as a single provider.
    candidates = provider.providers if isinstance(provider, IterListProvider) else [provider]
    return [p for p in candidates if p.working]
# Generate __models__ using the auto-discovered models
# name -> (Model, [working providers]); unnamed models and models with no
# working provider are dropped.
__models__ = {
    name: (model, _get_working_providers(model))
    for name, model in ModelRegistry.all_models().items()
    if name and _get_working_providers(model)
}

# Generate _all_models list
_all_models = list(__models__.keys())

# Backward compatibility - ensure Model.__all__() returns the correct list
Model.__all__ = staticmethod(lambda: _all_models)
|