feat: add 3 new LLM providers and improve the README

2026-01-30 16:47:19 +08:00
parent f610c0af8b
commit 2a57946421
13 changed files with 1483 additions and 377 deletions


@@ -89,6 +89,18 @@ pub struct LlmConfig {
    #[serde(default)]
    pub anthropic: AnthropicConfig,
    /// Kimi (Moonshot AI) configuration
    #[serde(default)]
    pub kimi: KimiConfig,
    /// DeepSeek configuration
    #[serde(default)]
    pub deepseek: DeepSeekConfig,
    /// OpenRouter configuration
    #[serde(default)]
    pub openrouter: OpenRouterConfig,
    /// Custom API configuration
    #[serde(default)]
    pub custom: Option<CustomLlmConfig>,
@@ -113,6 +125,9 @@ impl Default for LlmConfig {
            openai: OpenAiConfig::default(),
            ollama: OllamaConfig::default(),
            anthropic: AnthropicConfig::default(),
            kimi: KimiConfig::default(),
            deepseek: DeepSeekConfig::default(),
            openrouter: OpenRouterConfig::default(),
            custom: None,
            max_tokens: default_max_tokens(),
            temperature: default_temperature(),
@@ -187,6 +202,81 @@ impl Default for AnthropicConfig {
    }
}
/// Kimi (Moonshot AI) configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KimiConfig {
    /// API key
    pub api_key: Option<String>,
    /// Model to use
    #[serde(default = "default_kimi_model")]
    pub model: String,
    /// API base URL (for custom endpoints)
    #[serde(default = "default_kimi_base_url")]
    pub base_url: String,
}

impl Default for KimiConfig {
    fn default() -> Self {
        Self {
            api_key: None,
            model: default_kimi_model(),
            base_url: default_kimi_base_url(),
        }
    }
}

/// DeepSeek configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepSeekConfig {
    /// API key
    pub api_key: Option<String>,
    /// Model to use
    #[serde(default = "default_deepseek_model")]
    pub model: String,
    /// API base URL (for custom endpoints)
    #[serde(default = "default_deepseek_base_url")]
    pub base_url: String,
}

impl Default for DeepSeekConfig {
    fn default() -> Self {
        Self {
            api_key: None,
            model: default_deepseek_model(),
            base_url: default_deepseek_base_url(),
        }
    }
}

/// OpenRouter configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenRouterConfig {
    /// API key
    pub api_key: Option<String>,
    /// Model to use
    #[serde(default = "default_openrouter_model")]
    pub model: String,
    /// API base URL (for custom endpoints)
    #[serde(default = "default_openrouter_base_url")]
    pub base_url: String,
}

impl Default for OpenRouterConfig {
    fn default() -> Self {
        Self {
            api_key: None,
            model: default_openrouter_model(),
            base_url: default_openrouter_base_url(),
        }
    }
}
/// Custom LLM API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomLlmConfig {
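The three provider structs added above all follow the same pattern: an optional api_key plus model and base_url fields that fall back to the default_* functions via #[serde(default = "...")]. Below is a minimal, self-contained sketch (not part of this commit) of how that fallback behaves when a config section only supplies an API key; the local KimiConfig copy, the default functions, and the JSON input are illustrative only — the real definitions are the ones in this diff.

use serde::Deserialize;

// Illustrative local copy of the KimiConfig shape from this commit;
// the real type lives in the crate's config module.
#[derive(Debug, Deserialize)]
struct KimiConfig {
    api_key: Option<String>,
    #[serde(default = "default_kimi_model")]
    model: String,
    #[serde(default = "default_kimi_base_url")]
    base_url: String,
}

fn default_kimi_model() -> String {
    "moonshot-v1-8k".to_string()
}

fn default_kimi_base_url() -> String {
    "https://api.moonshot.cn/v1".to_string()
}

fn main() {
    // Only api_key is set; model and base_url fall back to the default fns.
    let cfg: KimiConfig = serde_json::from_str(r#"{ "api_key": "sk-example" }"#).unwrap();
    assert_eq!(cfg.model, "moonshot-v1-8k");
    assert_eq!(cfg.base_url, "https://api.moonshot.cn/v1");
    println!("{cfg:?}");
}

The same fallback applies to DeepSeekConfig and OpenRouterConfig with their respective default model and base URL functions.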
@@ -423,7 +513,7 @@ fn default_max_tokens() -> u32 {
}
fn default_temperature() -> f32 {
-    0.7
+    0.6
}
fn default_timeout() -> u64 {
@@ -450,6 +540,30 @@ fn default_anthropic_model() -> String {
"claude-3-sonnet-20240229".to_string()
}
fn default_kimi_model() -> String {
"moonshot-v1-8k".to_string()
}
fn default_kimi_base_url() -> String {
"https://api.moonshot.cn/v1".to_string()
}
fn default_deepseek_model() -> String {
"deepseek-chat".to_string()
}
fn default_deepseek_base_url() -> String {
"https://api.deepseek.com/v1".to_string()
}
fn default_openrouter_model() -> String {
"openai/gpt-3.5-turbo".to_string()
}
fn default_openrouter_base_url() -> String {
"https://openrouter.ai/api/v1".to_string()
}
fn default_commit_format() -> CommitFormat {
CommitFormat::Conventional
}
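For completeness, a hypothetical test module (not included in this commit) that would pin the new provider defaults to the constants defined above; it assumes KimiConfig, DeepSeekConfig, OpenRouterConfig, and their Default impls from the hunks above are in scope in the same module.

#[cfg(test)]
mod provider_default_tests {
    use super::*;

    #[test]
    fn new_provider_defaults_match_the_values_above() {
        let kimi = KimiConfig::default();
        assert!(kimi.api_key.is_none());
        assert_eq!(kimi.model, "moonshot-v1-8k");
        assert_eq!(kimi.base_url, "https://api.moonshot.cn/v1");

        assert_eq!(DeepSeekConfig::default().model, "deepseek-chat");
        assert_eq!(DeepSeekConfig::default().base_url, "https://api.deepseek.com/v1");

        assert_eq!(OpenRouterConfig::default().model, "openai/gpt-3.5-turbo");
        assert_eq!(OpenRouterConfig::default().base_url, "https://openrouter.ai/api/v1");
    }
}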