HuggingFaceAPI.py

from __future__ import annotations

from .OpenaiAPI import OpenaiAPI
from .HuggingChat import HuggingChat
from ...typing import AsyncResult, Messages


class HuggingFaceAPI(OpenaiAPI):
    """OpenAI-compatible provider backed by the Hugging Face Inference API."""
    label = "HuggingFace (Inference API)"
    url = "https://api-inference.huggingface.co"
    working = True
    default_model = "meta-llama/Llama-3.2-11B-Vision-Instruct"
    default_vision_model = default_model
    # Reuse the model list advertised by the HuggingChat provider.
    models = [
        *HuggingChat.models
    ]

    @classmethod
    def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        api_base: str = "https://api-inference.huggingface.co/v1",
        max_tokens: int = 500,
        **kwargs
    ) -> AsyncResult:
        # Delegate to the OpenaiAPI base class, pointing it at the
        # Hugging Face OpenAI-compatible endpoint.
        return super().create_async_generator(
            model, messages, api_base=api_base, max_tokens=max_tokens, **kwargs
        )
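
# --- Usage sketch (illustrative only, not part of the provider) ---
# A minimal way to consume this provider directly, assuming the OpenaiAPI base
# class yields response chunks as an async generator and accepts an `api_key`
# (a Hugging Face access token) through **kwargs; both details are assumptions
# not shown in this file. Kept commented out because the relative imports above
# require running inside the package.
#
# import asyncio
#
# async def demo():
#     async for chunk in HuggingFaceAPI.create_async_generator(
#         model=HuggingFaceAPI.default_model,
#         messages=[{"role": "user", "content": "Hello!"}],
#         api_key="hf_...",  # hypothetical token placeholder
#     ):
#         print(chunk, end="")
#
# asyncio.run(demo())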