diff --git a/docs/configuration.md b/docs/configuration.md
index 153cbc959..6bb70ea99 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -58,6 +58,7 @@ IMAP_PASSWORD=your-password-here
 |----------|---------|-------------|
 | `custom` | Any OpenAI-compatible endpoint | — |
 | `openrouter` | LLM (recommended, access to all models) | [openrouter.ai](https://openrouter.ai) |
+| `huggingface` | LLM (Hugging Face Inference Providers) | [huggingface.co/settings/tokens](https://huggingface.co/settings/tokens) |
 | `volcengine` | LLM (VolcEngine, pay-per-use) | [Coding Plan](https://www.volcengine.com/activity/codingplan?utm_campaign=nanobot&utm_content=nanobot&utm_medium=devrel&utm_source=OWO&utm_term=nanobot) · [volcengine.com](https://www.volcengine.com) |
 | `byteplus` | LLM (VolcEngine international, pay-per-use) | [Coding Plan](https://www.byteplus.com/en/activity/codingplan?utm_campaign=nanobot&utm_content=nanobot&utm_medium=devrel&utm_source=OWO&utm_term=nanobot) · [byteplus.com](https://www.byteplus.com) |
 | `anthropic` | LLM (Claude direct) | [console.anthropic.com](https://console.anthropic.com) |
diff --git a/nanobot/config/schema.py b/nanobot/config/schema.py
index f507b3d80..484023c62 100644
--- a/nanobot/config/schema.py
+++ b/nanobot/config/schema.py
@@ -126,6 +126,7 @@ class ProvidersConfig(Base):
     anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
     openai: ProviderConfig = Field(default_factory=ProviderConfig)
     openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
+    huggingface: ProviderConfig = Field(default_factory=ProviderConfig)
     deepseek: ProviderConfig = Field(default_factory=ProviderConfig)
     groq: ProviderConfig = Field(default_factory=ProviderConfig)
     zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
diff --git a/nanobot/providers/registry.py b/nanobot/providers/registry.py
index 6cb57cb04..807742077 100644
--- a/nanobot/providers/registry.py
+++ b/nanobot/providers/registry.py
@@ -120,6 +120,18 @@ PROVIDERS: tuple[ProviderSpec, ...] = (
         default_api_base="https://openrouter.ai/api/v1",
         supports_prompt_caching=True,
     ),
+    # Hugging Face Inference Providers: OpenAI-compatible router for chat models.
+    ProviderSpec(
+        name="huggingface",
+        keywords=("huggingface", "hugging-face"),
+        env_key="HF_TOKEN",
+        display_name="Hugging Face",
+        backend="openai_compat",
+        is_gateway=True,
+        detect_by_key_prefix="hf_",
+        detect_by_base_keyword="huggingface",
+        default_api_base="https://router.huggingface.co/v1",
+    ),
     # AiHubMix: global gateway, OpenAI-compatible interface.
     # strip_model_prefix=True: doesn't understand "anthropic/claude-3",
     # strips to bare "claude-3".