feat: add MiniMax provider support via LiteLLM

Author: Vincent Wu
Date: 2026-02-08 03:55:24 +08:00
parent 625fc60282
commit 3c8eadffed
4 changed files with 12 additions and 3 deletions


@@ -352,6 +352,7 @@ Config file: `~/.nanobot/config.json`
| `deepseek` | LLM (DeepSeek direct) | [platform.deepseek.com](https://platform.deepseek.com) |
| `groq` | LLM + **Voice transcription** (Whisper) | [console.groq.com](https://console.groq.com) |
| `gemini` | LLM (Gemini direct) | [aistudio.google.com](https://aistudio.google.com) |
+ | `minimax` | LLM (MiniMax direct) | [platform.minimax.io](https://platform.minimax.io) |
| `aihubmix` | LLM (API gateway, access to all models) | [aihubmix.com](https://aihubmix.com) |
| `dashscope` | LLM (Qwen) | [dashscope.console.aliyun.com](https://dashscope.console.aliyun.com) |
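
As an illustration of the new table row, a minimal MiniMax entry in `~/.nanobot/config.json` might look like the excerpt below (field names are assumed to mirror the `ProviderConfig` model this commit extends, i.e. `api_key` plus an optional `api_base`):

```python
import json

# Hypothetical config excerpt; only api_key is shown here, while api_base
# falls back to https://api.minimax.io/v1 elsewhere in this commit.
cfg = json.loads("""
{
  "providers": {
    "minimax": {
      "api_key": "YOUR_MINIMAX_API_KEY"
    }
  }
}
""")
assert cfg["providers"]["minimax"]["api_key"] == "YOUR_MINIMAX_API_KEY"
```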


@@ -640,6 +640,7 @@ def status():
has_openai = bool(config.providers.openai.api_key)
has_gemini = bool(config.providers.gemini.api_key)
has_zhipu = bool(config.providers.zhipu.api_key)
+ has_minimax = bool(config.providers.minimax.api_key)
has_vllm = bool(config.providers.vllm.api_base)
has_aihubmix = bool(config.providers.aihubmix.api_key)
@@ -648,6 +649,7 @@ def status():
console.print(f"OpenAI API: {'[green]✓[/green]' if has_openai else '[dim]not set[/dim]'}")
console.print(f"Gemini API: {'[green]✓[/green]' if has_gemini else '[dim]not set[/dim]'}")
console.print(f"Zhipu AI API: {'[green]✓[/green]' if has_zhipu else '[dim]not set[/dim]'}")
console.print(f"MiniMax API: {'[green]✓[/green]' if has_minimax else '[dim]not set[/dim]'}")
console.print(f"AiHubMix API: {'[green]✓[/green]' if has_aihubmix else '[dim]not set[/dim]'}")
vllm_status = f"[green]✓ {config.providers.vllm.api_base}[/green]" if has_vllm else "[dim]not set[/dim]"
console.print(f"vLLM/Local: {vllm_status}")


@@ -80,6 +80,7 @@ class ProvidersConfig(BaseModel):
vllm: ProviderConfig = Field(default_factory=ProviderConfig)
gemini: ProviderConfig = Field(default_factory=ProviderConfig)
moonshot: ProviderConfig = Field(default_factory=ProviderConfig)
+ minimax: ProviderConfig = Field(default_factory=ProviderConfig)
aihubmix: ProviderConfig = Field(default_factory=ProviderConfig) # AiHubMix API gateway
@@ -139,14 +140,15 @@ class Config(BaseSettings):
"openai": p.openai, "gpt": p.openai, "gemini": p.gemini,
"zhipu": p.zhipu, "glm": p.zhipu, "zai": p.zhipu,
"dashscope": p.dashscope, "qwen": p.dashscope,
"groq": p.groq, "moonshot": p.moonshot, "kimi": p.moonshot, "vllm": p.vllm,
"groq": p.groq, "moonshot": p.moonshot, "kimi": p.moonshot,
"minimax": p.minimax, "vllm": p.vllm,
}
for kw, provider in keyword_map.items():
if kw in model and provider.api_key:
return provider
# Fallback: gateways first (can serve any model), then specific providers
all_providers = [p.openrouter, p.aihubmix, p.anthropic, p.openai, p.deepseek,
- p.gemini, p.zhipu, p.dashscope, p.moonshot, p.vllm, p.groq]
+ p.gemini, p.zhipu, p.dashscope, p.moonshot, p.minimax, p.vllm, p.groq]
return next((pr for pr in all_providers if pr.api_key), None)
def get_api_key(self, model: str | None = None) -> str | None:
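
To make the lookup order above concrete, here is a self-contained sketch of the keyword-based resolution this hunk extends (simplified: `resolve_provider` is a stand-in name rather than the real method on `Config`, and only the `minimax` entry is shown):

```python
from dataclasses import dataclass

@dataclass
class ProviderConfig:
    api_key: str | None = None
    api_base: str | None = None

def resolve_provider(model: str, keyword_map: dict[str, ProviderConfig]) -> ProviderConfig | None:
    """Return the first provider whose keyword appears in the model name and has an API key set."""
    model = model.lower()
    for kw, provider in keyword_map.items():
        if kw in model and provider.api_key:
            return provider
    return None  # caller then falls back to the gateway-first provider list

minimax = ProviderConfig(api_key="sk-example")  # placeholder key
assert resolve_provider("MiniMax-M1", {"minimax": minimax, "vllm": ProviderConfig()}) is minimax
```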


@@ -13,7 +13,7 @@ class LiteLLMProvider(LLMProvider):
"""
LLM provider using LiteLLM for multi-provider support.
- Supports OpenRouter, Anthropic, OpenAI, Gemini, and many other providers through
+ Supports OpenRouter, Anthropic, OpenAI, Gemini, MiniMax, and many other providers through
a unified interface.
"""
@@ -69,6 +69,9 @@ class LiteLLMProvider(LLMProvider):
elif "moonshot" in default_model or "kimi" in default_model:
os.environ.setdefault("MOONSHOT_API_KEY", api_key)
os.environ.setdefault("MOONSHOT_API_BASE", api_base or "https://api.moonshot.cn/v1")
elif "minimax" in default_model.lower():
os.environ.setdefault("MINIMAX_API_KEY", api_key)
os.environ.setdefault("MINIMAX_API_BASE", api_base or "https://api.minimax.io/v1")
if api_base:
litellm.api_base = api_base
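
Because these use `os.environ.setdefault`, values already present in the environment are left untouched, so an exported `MINIMAX_API_KEY` or a custom `MINIMAX_API_BASE` takes precedence over what the config file supplies. A small sketch of that behaviour:

```python
import os

# Pretend the user already exported a custom base URL in their shell.
os.environ["MINIMAX_API_BASE"] = "https://my-proxy.example/v1"

os.environ.setdefault("MINIMAX_API_BASE", "https://api.minimax.io/v1")  # no-op: already set
os.environ.setdefault("MINIMAX_API_KEY", "sk-from-config")              # set: was missing

print(os.environ["MINIMAX_API_BASE"])  # https://my-proxy.example/v1
print(os.environ["MINIMAX_API_KEY"])   # sk-from-config
```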
@@ -105,6 +108,7 @@ class LiteLLMProvider(LLMProvider):
(("glm", "zhipu"), "zai", ("zhipu/", "zai/", "openrouter/", "hosted_vllm/")),
(("qwen", "dashscope"), "dashscope", ("dashscope/", "openrouter/")),
(("moonshot", "kimi"), "moonshot", ("moonshot/", "openrouter/")),
(("minimax",), "minimax", ("minimax/", "openrouter/")),
(("gemini",), "gemini", ("gemini/",)),
]
model_lower = model.lower()
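
The hunk only shows the prefix table; the sketch below assumes it is used the same way as the existing entries, i.e. to prepend the LiteLLM provider prefix when a matching model name is not already qualified (the `qualify` helper name and the exact loop are assumptions, not code from this commit):

```python
# (keywords that identify the provider, prefix to prepend, prefixes that count as already qualified)
PREFIX_RULES = [
    (("moonshot", "kimi"), "moonshot", ("moonshot/", "openrouter/")),
    (("minimax",), "minimax", ("minimax/", "openrouter/")),
    (("gemini",), "gemini", ("gemini/",)),
]

def qualify(model: str) -> str:
    """Prepend the provider prefix unless the model name already carries an accepted one."""
    model_lower = model.lower()
    for keywords, prefix, accepted in PREFIX_RULES:
        if any(kw in model_lower for kw in keywords) and not model_lower.startswith(accepted):
            return f"{prefix}/{model}"
    return model

print(qualify("MiniMax-M1"))                     # minimax/MiniMax-M1
print(qualify("minimax/MiniMax-M1"))             # unchanged
print(qualify("openrouter/minimax/minimax-m1"))  # unchanged
```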