from __future__ import annotations

from dataclasses import dataclass

from .Provider import IterListProvider, ProviderType
from .Provider import (
    AIChatFree,
    Blackbox,
    Blackbox2,
    BingCreateImages,
    ChatGpt,
    ChatGptEs,
    Cloudflare,
    Copilot,
    CopilotAccount,
    DarkAI,
    DDG,
    DeepInfraChat,
    Flux,
    Free2GPT,
    GigaChat,
    Gemini,
    GeminiPro,
    HuggingChat,
    HuggingFace,
    Liaobots,
    Airforce,
    Mhystical,
    MetaAI,
    MicrosoftDesigner,
    OpenaiChat,
    OpenaiAccount,
    PerplexityLabs,
    Pi,
    Pizzagpt,
    PollinationsAI,
    Reka,
    ReplicateHome,
    RubiksAI,
    TeachAnything,
)
@dataclass(unsafe_hash=True)
class Model:
    """
    Represents a machine learning model configuration.

    Attributes:
        name (str): Name of the model.
        base_provider (str): Default provider for the model.
        best_provider (ProviderType): The preferred provider for the model, typically with retry logic.
    """
    name: str
    base_provider: str
    best_provider: ProviderType = None

    @staticmethod
    def __all__() -> list[str]:
        """Returns a list of all model names."""
        return _all_models

class ImageModel(Model):
    pass
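# Illustrative sketch (an assumption for documentation only, not an entry in the registry
# below): a Model pairs a public model name with the vendor it originates from and a
# preferred provider; wrapping several providers in IterListProvider adds the retry/fallback
# behaviour mentioned in the docstring above. The names here are hypothetical:
#
#     example_model = Model(
#         name = "example-7b",              # hypothetical model name
#         base_provider = "Example Lab",    # hypothetical vendor label
#         best_provider = IterListProvider([Cloudflare, HuggingFace]),  # tried in order
#     )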
### Default ###
default = Model(
    name = "",
    base_provider = "",
    best_provider = IterListProvider([
        DDG,
        Pizzagpt,
        ReplicateHome,
        Blackbox2,
        Blackbox,
        Free2GPT,
        DeepInfraChat,
        Airforce,
        ChatGptEs,
        Cloudflare,
        Mhystical,
        PollinationsAI,
    ])
)

############
### Text ###
############

### OpenAI ###
# gpt-3.5
gpt_35_turbo = Model(
    name = 'gpt-3.5-turbo',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([DarkAI, ChatGpt])
)

# gpt-4
gpt_4 = Model(
    name = 'gpt-4',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([DDG, Blackbox, ChatGptEs, PollinationsAI, Copilot, OpenaiChat, Liaobots, Airforce])
)

gpt_4_turbo = Model(
    name = 'gpt-4-turbo',
    base_provider = 'OpenAI',
    best_provider = Airforce
)

# gpt-4o
gpt_4o = Model(
    name = 'gpt-4o',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Blackbox, ChatGptEs, PollinationsAI, DarkAI, ChatGpt, Airforce, Liaobots, OpenaiChat])
)

gpt_4o_mini = Model(
    name = 'gpt-4o-mini',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([DDG, ChatGptEs, Pizzagpt, ChatGpt, Airforce, RubiksAI, Liaobots, OpenaiChat])
)

# o1
o1_preview = Model(
    name = 'o1-preview',
    base_provider = 'OpenAI',
    best_provider = Liaobots
)

o1_mini = Model(
    name = 'o1-mini',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Liaobots, Airforce])
)
### GigaChat ###
gigachat = Model(
    name = 'GigaChat:latest',
    base_provider = 'gigachat',
    best_provider = GigaChat
)

### Meta ###
meta = Model(
    name = "meta-ai",
    base_provider = "Meta",
    best_provider = MetaAI
)

# llama 2
llama_2_7b = Model(
    name = "llama-2-7b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Cloudflare, Airforce])
)

# llama 3
llama_3_8b = Model(
    name = "llama-3-8b",
    base_provider = "Meta Llama",
    best_provider = Cloudflare
)

# llama 3.1
llama_3_1_8b = Model(
    name = "llama-3.1-8b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([Blackbox, DeepInfraChat, Cloudflare, Airforce, PerplexityLabs])
)

llama_3_1_70b = Model(
    name = "llama-3.1-70b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([DDG, DeepInfraChat, Blackbox, Blackbox2, TeachAnything, PollinationsAI, DarkAI, Airforce, RubiksAI, PerplexityLabs])
)

llama_3_1_405b = Model(
    name = "llama-3.1-405b",
    base_provider = "Meta Llama",
    best_provider = Blackbox
)

# llama 3.2
llama_3_2_1b = Model(
    name = "llama-3.2-1b",
    base_provider = "Meta Llama",
    best_provider = Cloudflare
)

llama_3_2_11b = Model(
    name = "llama-3.2-11b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([HuggingChat, HuggingFace])
)

# llama 3.3
llama_3_3_70b = Model(
    name = "llama-3.3-70b",
    base_provider = "Meta Llama",
    best_provider = IterListProvider([HuggingChat, HuggingFace, PerplexityLabs])
)
### Mistral ###
mixtral_8x7b = Model(
    name = "mixtral-8x7b",
    base_provider = "Mistral",
    best_provider = DDG
)

mistral_nemo = Model(
    name = "mistral-nemo",
    base_provider = "Mistral",
    best_provider = IterListProvider([PollinationsAI, HuggingChat, HuggingFace])
)

mistral_large = Model(
    name = "mistral-large",
    base_provider = "Mistral",
    best_provider = PollinationsAI
)

### NousResearch ###
hermes_2_dpo = Model(
    name = "hermes-2-dpo",
    base_provider = "NousResearch",
    best_provider = Airforce
)

hermes_2_pro = Model(
    name = "hermes-2-pro",
    base_provider = "NousResearch",
    best_provider = Airforce
)

hermes_3 = Model(
    name = "hermes-3",
    base_provider = "NousResearch",
    best_provider = IterListProvider([HuggingChat, HuggingFace])
)

### Microsoft ###
phi_2 = Model(
    name = "phi-2",
    base_provider = "Microsoft",
    best_provider = Airforce
)

phi_3_5_mini = Model(
    name = "phi-3.5-mini",
    base_provider = "Microsoft",
    best_provider = IterListProvider([HuggingChat, HuggingFace])
)

### Google DeepMind ###
# gemini
gemini_pro = Model(
    name = 'gemini-pro',
    base_provider = 'Google DeepMind',
    best_provider = IterListProvider([Blackbox, AIChatFree, Gemini, GeminiPro, Liaobots])
)

gemini_flash = Model(
    name = 'gemini-flash',
    base_provider = 'Google DeepMind',
    best_provider = IterListProvider([Blackbox, Gemini, GeminiPro, Liaobots])
)

gemini = Model(
    name = 'gemini',
    base_provider = 'Google DeepMind',
    best_provider = Gemini
)

# gemma
gemma_2b = Model(
    name = 'gemma-2b',
    base_provider = 'Google',
    best_provider = ReplicateHome
)
### Anthropic ###
# claude 3
claude_3_opus = Model(
    name = 'claude-3-opus',
    base_provider = 'Anthropic',
    best_provider = Liaobots
)

claude_3_sonnet = Model(
    name = 'claude-3-sonnet',
    base_provider = 'Anthropic',
    best_provider = Liaobots
)

claude_3_haiku = Model(
    name = 'claude-3-haiku',
    base_provider = 'Anthropic',
    best_provider = DDG
)

# claude 3.5
claude_3_5_sonnet = Model(
    name = 'claude-3.5-sonnet',
    base_provider = 'Anthropic',
    best_provider = IterListProvider([Blackbox, PollinationsAI, Liaobots])
)

### Reka AI ###
reka_core = Model(
    name = 'reka-core',
    base_provider = 'Reka AI',
    best_provider = Reka
)

### Blackbox AI ###
blackboxai = Model(
    name = 'blackboxai',
    base_provider = 'Blackbox AI',
    best_provider = Blackbox
)

blackboxai_pro = Model(
    name = 'blackboxai-pro',
    base_provider = 'Blackbox AI',
    best_provider = Blackbox
)

### CohereForAI ###
command_r_plus = Model(
    name = 'command-r-plus',
    base_provider = 'CohereForAI',
    best_provider = HuggingChat
)

command_r = Model(
    name = 'command-r',
    base_provider = 'CohereForAI',
    best_provider = PollinationsAI
)

### Qwen ###
# qwen 1_5
qwen_1_5_7b = Model(
    name = 'qwen-1.5-7b',
    base_provider = 'Qwen',
    best_provider = Cloudflare
)

# qwen 2
qwen_2_72b = Model(
    name = 'qwen-2-72b',
    base_provider = 'Qwen',
    best_provider = DeepInfraChat
)

# qwen 2.5
qwen_2_5_72b = Model(
    name = 'qwen-2.5-72b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([HuggingChat, HuggingFace])
)

qwen_2_5_coder_32b = Model(
    name = 'qwen-2.5-coder-32b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, PollinationsAI, HuggingChat, HuggingFace])
)

qwq_32b = Model(
    name = 'qwq-32b',
    base_provider = 'Qwen',
    best_provider = IterListProvider([DeepInfraChat, HuggingChat, HuggingFace])
)

### Inflection ###
pi = Model(
    name = 'pi',
    base_provider = 'Inflection',
    best_provider = Pi
)
### DeepSeek ###
deepseek_coder = Model(
    name = 'deepseek-coder',
    base_provider = 'DeepSeek',
    best_provider = Airforce
)

### WizardLM ###
wizardlm_2_8x22b = Model(
    name = 'wizardlm-2-8x22b',
    base_provider = 'WizardLM',
    best_provider = DeepInfraChat
)

### OpenChat ###
openchat_3_5 = Model(
    name = 'openchat-3.5',
    base_provider = 'OpenChat',
    best_provider = Airforce
)

### x.ai ###
grok_beta = Model(
    name = 'grok-beta',
    base_provider = 'x.ai',
    best_provider = Liaobots
)

### Perplexity AI ###
sonar_online = Model(
    name = 'sonar-online',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

sonar_chat = Model(
    name = 'sonar-chat',
    base_provider = 'Perplexity AI',
    best_provider = PerplexityLabs
)

### Nvidia ###
nemotron_70b = Model(
    name = 'nemotron-70b',
    base_provider = 'Nvidia',
    best_provider = IterListProvider([DeepInfraChat, HuggingChat, HuggingFace])
)

### Teknium ###
openhermes_2_5 = Model(
    name = 'openhermes-2.5',
    base_provider = 'Teknium',
    best_provider = Airforce
)

### Liquid ###
lfm_40b = Model(
    name = 'lfm-40b',
    base_provider = 'Liquid',
    best_provider = IterListProvider([Airforce, PerplexityLabs])
)

### DiscoResearch ###
german_7b = Model(
    name = 'german-7b',
    base_provider = 'DiscoResearch',
    best_provider = Airforce
)

### HuggingFaceH4 ###
zephyr_7b = Model(
    name = 'zephyr-7b',
    base_provider = 'HuggingFaceH4',
    best_provider = Airforce
)

### Inferless ###
neural_7b = Model(
    name = 'neural-7b',
    base_provider = 'Inferless',
    best_provider = Airforce
)

### PollinationsAI ###
p1 = Model(
    name = 'p1',
    base_provider = 'PollinationsAI',
    best_provider = PollinationsAI
)

### Uncensored AI ###
evil = Model(
    name = 'evil',
    base_provider = 'Evil Mode - Experimental',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

### Other ###
midijourney = Model(
    name = 'midijourney',
    base_provider = 'Other',
    best_provider = PollinationsAI
)

turbo = Model(
    name = 'turbo',
    base_provider = 'Other',
    best_provider = PollinationsAI
)

unity = Model(
    name = 'unity',
    base_provider = 'Other',
    best_provider = PollinationsAI
)

rtist = Model(
    name = 'rtist',
    base_provider = 'Other',
    best_provider = PollinationsAI
)
#############
### Image ###
#############

### Stability AI ###
sdxl = ImageModel(
    name = 'sdxl',
    base_provider = 'Stability AI',
    best_provider = IterListProvider([ReplicateHome, Airforce])
)

sd_3 = ImageModel(
    name = 'sd-3',
    base_provider = 'Stability AI',
    best_provider = ReplicateHome
)

### Playground ###
playground_v2_5 = ImageModel(
    name = 'playground-v2.5',
    base_provider = 'Playground AI',
    best_provider = ReplicateHome
)

### Flux AI ###
flux = ImageModel(
    name = 'flux',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([Blackbox, Blackbox2, PollinationsAI, Airforce])
)

flux_pro = ImageModel(
    name = 'flux-pro',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

flux_dev = ImageModel(
    name = 'flux-dev',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([Flux, HuggingChat, HuggingFace])
)

flux_realism = ImageModel(
    name = 'flux-realism',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

flux_cablyai = ImageModel(
    name = 'flux-cablyai',
    base_provider = 'Flux AI',
    best_provider = PollinationsAI
)

flux_anime = ImageModel(
    name = 'flux-anime',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

flux_3d = ImageModel(
    name = 'flux-3d',
    base_provider = 'Flux AI',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

flux_disney = ImageModel(
    name = 'flux-disney',
    base_provider = 'Flux AI',
    best_provider = Airforce
)

flux_pixel = ImageModel(
    name = 'flux-pixel',
    base_provider = 'Flux AI',
    best_provider = Airforce
)

flux_4o = ImageModel(
    name = 'flux-4o',
    base_provider = 'Flux AI',
    best_provider = Airforce
)

### OpenAI ###
dall_e_3 = ImageModel(
    name = 'dall-e-3',
    base_provider = 'OpenAI',
    best_provider = IterListProvider([Airforce, PollinationsAI, CopilotAccount, OpenaiAccount, MicrosoftDesigner, BingCreateImages])
)

### Midjourney ###
midjourney = ImageModel(
    name = 'midjourney',
    base_provider = 'Midjourney',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)

### Other ###
any_dark = ImageModel(
    name = 'any-dark',
    base_provider = 'Other',
    best_provider = IterListProvider([PollinationsAI, Airforce])
)
class ModelUtils:
    """
    Utility class for mapping string identifiers to Model instances.

    Attributes:
        convert (dict[str, Model]): Dictionary mapping model string identifiers to Model instances.
    """
    convert: dict[str, Model] = {
        ############
        ### Text ###
        ############

        ### OpenAI ###
        # gpt-3
        'gpt-3': gpt_35_turbo,

        # gpt-3.5
        'gpt-3.5-turbo': gpt_35_turbo,

        # gpt-4
        'gpt-4': gpt_4,
        'gpt-4-turbo': gpt_4_turbo,

        # gpt-4o
        'gpt-4o': gpt_4o,
        'gpt-4o-mini': gpt_4o_mini,

        # o1
        'o1-preview': o1_preview,
        'o1-mini': o1_mini,

        ### Meta ###
        "meta-ai": meta,

        # llama-2
        'llama-2-7b': llama_2_7b,

        # llama-3
        'llama-3-8b': llama_3_8b,

        # llama-3.1
        'llama-3.1-8b': llama_3_1_8b,
        'llama-3.1-70b': llama_3_1_70b,
        'llama-3.1-405b': llama_3_1_405b,

        # llama-3.2
        'llama-3.2-1b': llama_3_2_1b,
        'llama-3.2-11b': llama_3_2_11b,

        # llama-3.3
        'llama-3.3-70b': llama_3_3_70b,

        ### Mistral ###
        'mixtral-8x7b': mixtral_8x7b,
        'mistral-nemo': mistral_nemo,
        'mistral-large': mistral_large,

        ### NousResearch ###
        'hermes-2-dpo': hermes_2_dpo,
        'hermes-2-pro': hermes_2_pro,
        'hermes-3': hermes_3,

        ### Microsoft ###
        'phi-2': phi_2,
        'phi-3.5-mini': phi_3_5_mini,

        ### Google ###
        # gemini
        'gemini': gemini,
        'gemini-pro': gemini_pro,
        'gemini-flash': gemini_flash,

        # gemma
        'gemma-2b': gemma_2b,

        ### Anthropic ###
        # claude 3
        'claude-3-opus': claude_3_opus,
        'claude-3-sonnet': claude_3_sonnet,
        'claude-3-haiku': claude_3_haiku,

        # claude 3.5
        'claude-3.5-sonnet': claude_3_5_sonnet,

        ### Reka AI ###
        'reka-core': reka_core,

        ### Blackbox AI ###
        'blackboxai': blackboxai,
        'blackboxai-pro': blackboxai_pro,

        ### CohereForAI ###
        'command-r+': command_r_plus,
        'command-r': command_r,

        ### GigaChat ###
        'gigachat': gigachat,

        ### Qwen ###
        # qwen 1_5
        'qwen-1.5-7b': qwen_1_5_7b,

        # qwen 2
        'qwen-2-72b': qwen_2_72b,

        # qwen 2.5
        'qwen-2.5-72b': qwen_2_5_72b,
        'qwen-2.5-coder-32b': qwen_2_5_coder_32b,
        'qwq-32b': qwq_32b,

        ### Inflection ###
        'pi': pi,

        ### WizardLM ###
        'wizardlm-2-8x22b': wizardlm_2_8x22b,

        ### OpenChat ###
        'openchat-3.5': openchat_3_5,

        ### x.ai ###
        'grok-beta': grok_beta,

        ### Perplexity AI ###
        'sonar-online': sonar_online,
        'sonar-chat': sonar_chat,

        ### DeepSeek ###
        'deepseek-coder': deepseek_coder,
        ### DiscoResearch ###
        'german-7b': german_7b,

        ### Nvidia ###
        'nemotron-70b': nemotron_70b,

        ### Teknium ###
        'openhermes-2.5': openhermes_2_5,

        ### Liquid ###
        'lfm-40b': lfm_40b,

        ### HuggingFaceH4 ###
        'zephyr-7b': zephyr_7b,

        ### Inferless ###
        'neural-7b': neural_7b,

        ### PollinationsAI ###
        'p1': p1,

        ### Uncensored AI ###
        'evil': evil,

        ### Other ###
        'midijourney': midijourney,
        'turbo': turbo,
        'unity': unity,
        'rtist': rtist,

        #############
        ### Image ###
        #############

        ### Stability AI ###
        'sdxl': sdxl,
        'sd-3': sd_3,

        ### Playground ###
        'playground-v2.5': playground_v2_5,

        ### Flux AI ###
        'flux': flux,
        'flux-pro': flux_pro,
        'flux-dev': flux_dev,
        'flux-realism': flux_realism,
        'flux-cablyai': flux_cablyai,
        'flux-anime': flux_anime,
        'flux-3d': flux_3d,
        'flux-disney': flux_disney,
        'flux-pixel': flux_pixel,
        'flux-4o': flux_4o,

        ### OpenAI ###
        'dall-e-3': dall_e_3,

        ### Midjourney ###
        'midjourney': midjourney,

        ### Other ###
        'any-dark': any_dark,
    }
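# Illustrative lookup sketch (an assumption for documentation only; it presumes this module
# is importable as g4f.models and uses an alias defined in ModelUtils.convert above):
#
#     from g4f.models import ModelUtils
#
#     model = ModelUtils.convert['gpt-4o-mini']   # resolve a string alias to a Model instance
#     print(model.base_provider)                  # -> 'OpenAI'
#     print(model.best_provider)                  # provider (or IterListProvider) used for requests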
# Build a mapping of every model that has at least one working provider:
# model name -> (Model instance, list of working providers).
__models__ = {
    model.name: (model, providers)
    for model, providers in [
        (model, [provider for provider in providers if provider.working])
        for model, providers in [
            (
                model,
                model.best_provider.providers
                if isinstance(model.best_provider, IterListProvider)
                else [model.best_provider]
                if model.best_provider is not None
                else []
            )
            for model in ModelUtils.convert.values()
        ]
    ]
    if providers
}

# Update ModelUtils.convert so it only contains models with working providers.
ModelUtils.convert = {model.name: model for model, _ in __models__.values()}

_all_models = list(ModelUtils.convert.keys())
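
# Minimal smoke-test sketch (an illustrative addition, not part of the upstream module):
# run with `python -m <package>.models` (e.g. `python -m g4f.models`, assuming that is the
# package name) to list how many models survive the working-provider filter and which
# providers back the first one. Provider entries are assumed to be classes, so a readable
# name is taken from __name__ when present.
if __name__ == "__main__":
    print(f"{len(_all_models)} models with at least one working provider")
    if _all_models:
        sample_name = _all_models[0]
        sample_model, sample_providers = __models__[sample_name]
        provider_names = [getattr(provider, "__name__", repr(provider)) for provider in sample_providers]
        print(f"{sample_name} -> {provider_names}")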
|