mirror of
https://github.com/HKUDS/nanobot.git
synced 2026-05-03 16:25:53 +00:00
feat(provider): add Xiaomi MiMo LLM support
Register xiaomi_mimo as an OpenAI-compatible provider with its API base URL, add xiaomi_mimo to the provider config schema, and document it in README. Signed-off-by: Lingao Meng <menglingao@xiaomi.com>
This commit is contained in:
parent
7113ad34f4
commit
cf6c979339
@ -875,6 +875,7 @@ Config file: `~/.nanobot/config.json`
|
|||||||
| `dashscope` | LLM (Qwen) | [dashscope.console.aliyun.com](https://dashscope.console.aliyun.com) |
|
| `dashscope` | LLM (Qwen) | [dashscope.console.aliyun.com](https://dashscope.console.aliyun.com) |
|
||||||
| `moonshot` | LLM (Moonshot/Kimi) | [platform.moonshot.cn](https://platform.moonshot.cn) |
|
| `moonshot` | LLM (Moonshot/Kimi) | [platform.moonshot.cn](https://platform.moonshot.cn) |
|
||||||
| `zhipu` | LLM (Zhipu GLM) | [open.bigmodel.cn](https://open.bigmodel.cn) |
|
| `zhipu` | LLM (Zhipu GLM) | [open.bigmodel.cn](https://open.bigmodel.cn) |
|
||||||
|
| `xiaomi_mimo` | LLM (Xiaomi MiMo) | [platform.xiaomimimo.com](https://platform.xiaomimimo.com) |
|
||||||
| `ollama` | LLM (local, Ollama) | — |
|
| `ollama` | LLM (local, Ollama) | — |
|
||||||
| `mistral` | LLM | [docs.mistral.ai](https://docs.mistral.ai/) |
|
| `mistral` | LLM | [docs.mistral.ai](https://docs.mistral.ai/) |
|
||||||
| `stepfun` | LLM (Step Fun/阶跃星辰) | [platform.stepfun.com](https://platform.stepfun.com) |
|
| `stepfun` | LLM (Step Fun/阶跃星辰) | [platform.stepfun.com](https://platform.stepfun.com) |
|
||||||
|
|||||||
@ -81,6 +81,7 @@ class ProvidersConfig(Base):
|
|||||||
minimax: ProviderConfig = Field(default_factory=ProviderConfig)
|
minimax: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
mistral: ProviderConfig = Field(default_factory=ProviderConfig)
|
mistral: ProviderConfig = Field(default_factory=ProviderConfig)
|
||||||
stepfun: ProviderConfig = Field(default_factory=ProviderConfig) # Step Fun (阶跃星辰)
|
stepfun: ProviderConfig = Field(default_factory=ProviderConfig) # Step Fun (阶跃星辰)
|
||||||
|
xiaomi_mimo: ProviderConfig = Field(default_factory=ProviderConfig) # Xiaomi MIMO (小米)
|
||||||
aihubmix: ProviderConfig = Field(default_factory=ProviderConfig) # AiHubMix API gateway
|
aihubmix: ProviderConfig = Field(default_factory=ProviderConfig) # AiHubMix API gateway
|
||||||
siliconflow: ProviderConfig = Field(default_factory=ProviderConfig) # SiliconFlow (硅基流动)
|
siliconflow: ProviderConfig = Field(default_factory=ProviderConfig) # SiliconFlow (硅基流动)
|
||||||
volcengine: ProviderConfig = Field(default_factory=ProviderConfig) # VolcEngine (火山引擎)
|
volcengine: ProviderConfig = Field(default_factory=ProviderConfig) # VolcEngine (火山引擎)
|
||||||
|
|||||||
@ -49,7 +49,7 @@ class LLMResponse:
|
|||||||
tool_calls: list[ToolCallRequest] = field(default_factory=list)
|
tool_calls: list[ToolCallRequest] = field(default_factory=list)
|
||||||
finish_reason: str = "stop"
|
finish_reason: str = "stop"
|
||||||
usage: dict[str, int] = field(default_factory=dict)
|
usage: dict[str, int] = field(default_factory=dict)
|
||||||
reasoning_content: str | None = None # Kimi, DeepSeek-R1 etc.
|
reasoning_content: str | None = None # Kimi, DeepSeek-R1, MiMo etc.
|
||||||
thinking_blocks: list[dict] | None = None # Anthropic extended thinking
|
thinking_blocks: list[dict] | None = None # Anthropic extended thinking
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|||||||
@ -297,6 +297,15 @@ PROVIDERS: tuple[ProviderSpec, ...] = (
|
|||||||
backend="openai_compat",
|
backend="openai_compat",
|
||||||
default_api_base="https://api.stepfun.com/v1",
|
default_api_base="https://api.stepfun.com/v1",
|
||||||
),
|
),
|
||||||
|
# Xiaomi MIMO (小米): OpenAI-compatible API
|
||||||
|
ProviderSpec(
|
||||||
|
name="xiaomi_mimo",
|
||||||
|
keywords=("xiaomi_mimo", "mimo"),
|
||||||
|
env_key="XIAOMIMIMO_API_KEY",
|
||||||
|
display_name="Xiaomi MIMO",
|
||||||
|
backend="openai_compat",
|
||||||
|
default_api_base="https://api.xiaomimimo.com/v1",
|
||||||
|
),
|
||||||
# === Local deployment (matched by config key, NOT by api_base) =========
|
# === Local deployment (matched by config key, NOT by api_base) =========
|
||||||
# vLLM / any OpenAI-compatible local server
|
# vLLM / any OpenAI-compatible local server
|
||||||
ProviderSpec(
|
ProviderSpec(
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user