feat: add 3 new LLM providers and improve the README

2026-01-30 16:47:19 +08:00
parent f610c0af8b
commit 2a57946421
13 changed files with 1483 additions and 377 deletions


@@ -6,6 +6,20 @@ use dialoguer::{Confirm, Input, Select};
use crate::config::manager::ConfigManager;
use crate::config::{CommitFormat, LlmConfig};
/// Mask API key with asterisks for security
fn mask_api_key(key: Option<&str>) -> String {
    match key {
        Some(k) => {
            if k.len() <= 8 {
                // Too short to partially reveal without leaking most of the key
                "*".repeat(k.len())
            } else {
                // Show only the first and last four characters
                format!("{}***{}", &k[..4], &k[k.len()-4..])
            }
        }
        None => "✗ not set".red().to_string(),
    }
}
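For reference, a minimal sketch of the masking behavior (the sample keys are hypothetical, not real credentials):

#[cfg(test)]
mod mask_api_key_tests {
    use super::*;

    #[test]
    fn masks_short_and_long_keys() {
        // 19 chars: first and last four are revealed
        assert_eq!(mask_api_key(Some("sk-abcd1234efgh5678")), "sk-a***5678");
        // 8 chars or fewer: fully masked
        assert_eq!(mask_api_key(Some("short")), "*****");
    }
}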
/// Manage configuration settings
#[derive(Parser)]
pub struct ConfigCommand {
@@ -18,6 +32,9 @@ enum ConfigSubcommand {
/// Show current configuration
Show,
/// List all configuration information (with masked API keys)
List,
/// Edit configuration file
Edit,
@@ -54,6 +71,24 @@ enum ConfigSubcommand {
key: String,
},
/// Set Kimi API key
SetKimiKey {
/// API key
key: String,
},
/// Set DeepSeek API key
SetDeepSeekKey {
/// API key
key: String,
},
/// Set OpenRouter API key
SetOpenRouterKey {
/// API key
key: String,
},
/// Configure Ollama settings
SetOllama {
/// Ollama server URL
@@ -64,6 +99,36 @@ enum ConfigSubcommand {
model: Option<String>,
},
/// Configure Kimi settings
SetKimi {
/// API base URL (for custom endpoints)
#[arg(short, long)]
base_url: Option<String>,
/// Model name
#[arg(short, long)]
model: Option<String>,
},
/// Configure DeepSeek settings
SetDeepSeek {
/// API base URL (for custom endpoints)
#[arg(short, long)]
base_url: Option<String>,
/// Model name
#[arg(short, long)]
model: Option<String>,
},
/// Configure OpenRouter settings
SetOpenRouter {
/// API base URL (for custom endpoints)
#[arg(short, long)]
base_url: Option<String>,
/// Model name
#[arg(short, long)]
model: Option<String>,
},
/// Set commit format
SetCommitFormat {
/// Format (conventional, commitlint)
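For orientation, the new subcommands would be invoked roughly like this (assuming clap's default kebab-case naming for derive variants; the keys are placeholders):

quicommit config set-kimi-key <KEY>
quicommit config set-deepseek-key <KEY>
quicommit config set-openrouter-key <KEY>
quicommit config set-kimi --model moonshot-v1-32k
quicommit config set-openrouter --base-url https://openrouter.ai/api/v1 --model openai/gpt-4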
@@ -114,13 +179,20 @@ impl ConfigCommand {
pub async fn execute(&self) -> Result<()> {
match &self.command {
Some(ConfigSubcommand::Show) => self.show_config().await,
Some(ConfigSubcommand::List) => self.list_config().await,
Some(ConfigSubcommand::Edit) => self.edit_config().await,
Some(ConfigSubcommand::Set { key, value }) => self.set_value(key, value).await,
Some(ConfigSubcommand::Get { key }) => self.get_value(key).await,
Some(ConfigSubcommand::SetLlm { provider }) => self.set_llm(provider.as_deref()).await,
Some(ConfigSubcommand::SetOpenAiKey { key }) => self.set_openai_key(key).await,
Some(ConfigSubcommand::SetAnthropicKey { key }) => self.set_anthropic_key(key).await,
Some(ConfigSubcommand::SetKimiKey { key }) => self.set_kimi_key(key).await,
Some(ConfigSubcommand::SetDeepSeekKey { key }) => self.set_deepseek_key(key).await,
Some(ConfigSubcommand::SetOpenRouterKey { key }) => self.set_openrouter_key(key).await,
Some(ConfigSubcommand::SetOllama { url, model }) => self.set_ollama(url.as_deref(), model.as_deref()).await,
Some(ConfigSubcommand::SetKimi { base_url, model }) => self.set_kimi(base_url.as_deref(), model.as_deref()).await,
Some(ConfigSubcommand::SetDeepSeek { base_url, model }) => self.set_deepseek(base_url.as_deref(), model.as_deref()).await,
Some(ConfigSubcommand::SetOpenRouter { base_url, model }) => self.set_openrouter(base_url.as_deref(), model.as_deref()).await,
Some(ConfigSubcommand::SetCommitFormat { format }) => self.set_commit_format(format).await,
Some(ConfigSubcommand::SetVersionPrefix { prefix }) => self.set_version_prefix(prefix).await,
Some(ConfigSubcommand::SetChangelogPath { path }) => self.set_changelog_path(path).await,
@@ -137,7 +209,7 @@ impl ConfigCommand {
let manager = ConfigManager::new()?;
let config = manager.config();
println!("{}", "\nQuicCommit Configuration".bold());
println!("{}", "\nQuiCommit Configuration".bold());
println!("{}", "".repeat(60));
println!("\n{}", "General:".bold());
@@ -159,13 +231,27 @@ impl ConfigCommand {
}
"openai" => {
println!(" Model: {}", config.llm.openai.model.cyan());
println!(" API key: {}",
if config.llm.openai.api_key.is_some() { "✓ set".green() } else { "✗ not set".red() });
println!(" Base URL: {}", config.llm.openai.base_url);
println!(" API key: {}", mask_api_key(config.llm.openai.api_key.as_deref()));
}
"anthropic" => {
println!(" Model: {}", config.llm.anthropic.model.cyan());
println!(" API key: {}",
if config.llm.anthropic.api_key.is_some() { "✓ set".green() } else { "✗ not set".red() });
println!(" API key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref()));
}
"kimi" => {
println!(" Model: {}", config.llm.kimi.model.cyan());
println!(" Base URL: {}", config.llm.kimi.base_url);
println!(" API key: {}", mask_api_key(config.llm.kimi.api_key.as_deref()));
}
"deepseek" => {
println!(" Model: {}", config.llm.deepseek.model.cyan());
println!(" Base URL: {}", config.llm.deepseek.base_url);
println!(" API key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref()));
}
"openrouter" => {
println!(" Model: {}", config.llm.openrouter.model.cyan());
println!(" Base URL: {}", config.llm.openrouter.base_url);
println!(" API key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref()));
}
_ => {}
}
@@ -192,6 +278,105 @@ impl ConfigCommand {
Ok(())
}
/// List all configuration information with masked API keys
async fn list_config(&self) -> Result<()> {
let manager = ConfigManager::new()?;
let config = manager.config();
println!("{}", "\nQuiCommit Configuration".bold());
println!("{}", "".repeat(80));
println!("\n{}", "📁 General Configuration:".bold().blue());
println!(" Config file: {}", manager.path().display());
println!(" Default profile: {}",
config.default_profile.as_deref().unwrap_or("(none)").cyan());
println!(" Profiles: {} profile(s)", config.profiles.len());
println!(" Repository mappings: {} mapping(s)", config.repo_profiles.len());
println!("\n{}", "🤖 LLM Configuration:".bold().blue());
println!(" Provider: {}", config.llm.provider.cyan());
println!(" Max tokens: {}", config.llm.max_tokens);
println!(" Temperature: {}", config.llm.temperature);
println!(" Timeout: {}s", config.llm.timeout);
println!("\n{}", " LLM Provider Details:".dimmed());
// OpenAI
println!(" 🔹 OpenAI:");
println!(" Model: {}", config.llm.openai.model.cyan());
println!(" Base URL: {}", config.llm.openai.base_url);
println!(" API Key: {}", mask_api_key(config.llm.openai.api_key.as_deref()));
// Anthropic
println!(" 🔹 Anthropic:");
println!(" Model: {}", config.llm.anthropic.model.cyan());
println!(" API Key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref()));
// Kimi
println!(" 🔹 Kimi (Moonshot AI):");
println!(" Model: {}", config.llm.kimi.model.cyan());
println!(" Base URL: {}", config.llm.kimi.base_url);
println!(" API Key: {}", mask_api_key(config.llm.kimi.api_key.as_deref()));
// DeepSeek
println!(" 🔹 DeepSeek:");
println!(" Model: {}", config.llm.deepseek.model.cyan());
println!(" Base URL: {}", config.llm.deepseek.base_url);
println!(" API Key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref()));
// OpenRouter
println!(" 🔹 OpenRouter:");
println!(" Model: {}", config.llm.openrouter.model.cyan());
println!(" Base URL: {}", config.llm.openrouter.base_url);
println!(" API Key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref()));
// Ollama
println!(" 🔹 Ollama:");
println!(" URL: {}", config.llm.ollama.url);
println!(" Model: {}", config.llm.ollama.model.cyan());
println!("\n{}", "📝 Commit Configuration:".bold().blue());
println!(" Format: {}", config.commit.format.to_string().cyan());
println!(" Auto-generate: {}", if config.commit.auto_generate { "✓ yes".green() } else { "✗ no".red() });
println!(" Allow empty: {}", if config.commit.allow_empty { "✓ yes".green() } else { "✗ no".red() });
println!(" GPG sign: {}", if config.commit.gpg_sign { "✓ yes".green() } else { "✗ no".red() });
println!(" Default scope: {}", config.commit.default_scope.as_deref().unwrap_or("(none)").cyan());
println!(" Max subject length: {}", config.commit.max_subject_length);
println!(" Require scope: {}", if config.commit.require_scope { "✓ yes".green() } else { "✗ no".red() });
println!(" Require body: {}", if config.commit.require_body { "✓ yes".green() } else { "✗ no".red() });
if !config.commit.body_required_types.is_empty() {
println!(" Body required types: {}", config.commit.body_required_types.join(", ").cyan());
}
println!("\n{}", "🏷️ Tag Configuration:".bold().blue());
println!(" Version prefix: '{}'", config.tag.version_prefix.cyan());
println!(" Auto-generate: {}", if config.tag.auto_generate { "✓ yes".green() } else { "✗ no".red() });
println!(" GPG sign: {}", if config.tag.gpg_sign { "✓ yes".green() } else { "✗ no".red() });
println!(" Include changelog: {}", if config.tag.include_changelog { "✓ yes".green() } else { "✗ no".red() });
println!(" Annotation template: {}", config.tag.annotation_template.as_deref().unwrap_or("(none)").cyan());
println!("\n{}", "📋 Changelog Configuration:".bold().blue());
println!(" Path: {}", config.changelog.path);
println!(" Auto-generate: {}", if config.changelog.auto_generate { "✓ yes".green() } else { "✗ no".red() });
println!(" Format: {}", format!("{:?}", config.changelog.format).cyan());
println!(" Include hashes: {}", if config.changelog.include_hashes { "✓ yes".green() } else { "✗ no".red() });
println!(" Include authors: {}", if config.changelog.include_authors { "✓ yes".green() } else { "✗ no".red() });
println!(" Group by type: {}", if config.changelog.group_by_type { "✓ yes".green() } else { "✗ no".red() });
if !config.changelog.custom_categories.is_empty() {
println!(" Custom categories: {} category(ies)", config.changelog.custom_categories.len());
}
println!("\n{}", "🎨 Theme Configuration:".bold().blue());
println!(" Colors: {}", if config.theme.colors { "✓ enabled".green() } else { "✗ disabled".red() });
println!(" Icons: {}", if config.theme.icons { "✓ enabled".green() } else { "✗ disabled".red() });
println!(" Date format: {}", config.theme.date_format.cyan());
println!("\n{}", "🔒 Security:".bold().blue());
println!(" Encrypt sensitive: {}", if config.encrypt_sensitive { "✓ yes".green() } else { "✗ no".red() });
Ok(())
}
async fn edit_config(&self) -> Result<()> {
let manager = ConfigManager::new()?;
crate::utils::editor::edit_file(manager.path())?;
@@ -263,7 +448,7 @@ impl ConfigCommand {
let provider = if let Some(p) = provider {
p.to_string()
} else {
let providers = vec!["ollama", "openai", "anthropic"];
let providers = vec!["ollama", "openai", "anthropic", "kimi", "deepseek", "openrouter"];
let idx = Select::new()
.with_prompt("Select LLM provider")
.items(&providers)
@@ -287,12 +472,86 @@ impl ConfigCommand {
.default("gpt-4".to_string())
.interact_text()?;
manager.config_mut().llm.openai.model = model;
let base_url: String = Input::new()
.with_prompt("Base URL (optional)")
.default("https://api.openai.com/v1".to_string())
.interact_text()?;
if base_url != "https://api.openai.com/v1" {
manager.config_mut().llm.openai.base_url = base_url;
}
}
"anthropic" => {
let api_key: String = Input::new()
.with_prompt("Anthropic API key")
.interact_text()?;
manager.set_anthropic_api_key(api_key);
let model: String = Input::new()
.with_prompt("Model")
.default("claude-3-sonnet-20240229".to_string())
.interact_text()?;
manager.config_mut().llm.anthropic.model = model;
}
"kimi" => {
let api_key: String = Input::new()
.with_prompt("Kimi API key")
.interact_text()?;
manager.set_kimi_api_key(api_key);
let model: String = Input::new()
.with_prompt("Model")
.default("moonshot-v1-8k".to_string())
.interact_text()?;
manager.config_mut().llm.kimi.model = model;
let base_url: String = Input::new()
.with_prompt("Base URL (optional)")
.default("https://api.moonshot.cn/v1".to_string())
.interact_text()?;
if base_url != "https://api.moonshot.cn/v1" {
manager.set_kimi_base_url(base_url);
}
}
"deepseek" => {
let api_key: String = Input::new()
.with_prompt("DeepSeek API key")
.interact_text()?;
manager.set_deepseek_api_key(api_key);
let model: String = Input::new()
.with_prompt("Model")
.default("deepseek-chat".to_string())
.interact_text()?;
manager.config_mut().llm.deepseek.model = model;
let base_url: String = Input::new()
.with_prompt("Base URL (optional)")
.default("https://api.deepseek.com/v1".to_string())
.interact_text()?;
if base_url != "https://api.deepseek.com/v1" {
manager.set_deepseek_base_url(base_url);
}
}
"openrouter" => {
let api_key: String = Input::new()
.with_prompt("OpenRouter API key")
.interact_text()?;
manager.set_openrouter_api_key(api_key);
let model: String = Input::new()
.with_prompt("Model")
.default("openai/gpt-3.5-turbo".to_string())
.interact_text()?;
manager.config_mut().llm.openrouter.model = model;
let base_url: String = Input::new()
.with_prompt("Base URL (optional)")
.default("https://openrouter.ai/api/v1".to_string())
.interact_text()?;
if base_url != "https://openrouter.ai/api/v1" {
manager.set_openrouter_base_url(base_url);
}
}
"ollama" => {
let url: String = Input::new()
@@ -332,6 +591,75 @@ impl ConfigCommand {
Ok(())
}
async fn set_kimi_key(&self, key: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_kimi_api_key(key.to_string());
manager.save()?;
println!("{} Kimi API key set", "".green());
Ok(())
}
async fn set_deepseek_key(&self, key: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_deepseek_api_key(key.to_string());
manager.save()?;
println!("{} DeepSeek API key set", "".green());
Ok(())
}
async fn set_openrouter_key(&self, key: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_openrouter_api_key(key.to_string());
manager.save()?;
println!("{} OpenRouter API key set", "".green());
Ok(())
}
async fn set_kimi(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;
if let Some(url) = base_url {
manager.set_kimi_base_url(url.to_string());
}
if let Some(m) = model {
manager.config_mut().llm.kimi.model = m.to_string();
}
manager.save()?;
println!("{} Kimi configuration updated", "".green());
Ok(())
}
async fn set_deepseek(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;
if let Some(url) = base_url {
manager.set_deepseek_base_url(url.to_string());
}
if let Some(m) = model {
manager.config_mut().llm.deepseek.model = m.to_string();
}
manager.save()?;
println!("{} DeepSeek configuration updated", "".green());
Ok(())
}
async fn set_openrouter(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;
if let Some(url) = base_url {
manager.set_openrouter_base_url(url.to_string());
}
if let Some(m) = model {
manager.config_mut().llm.openrouter.model = m.to_string();
}
manager.save()?;
println!("{} OpenRouter configuration updated", "".green());
Ok(())
}
async fn set_ollama(&self, url: Option<&str>, model: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;


@@ -22,7 +22,7 @@ pub struct InitCommand {
impl InitCommand {
pub async fn execute(&self) -> Result<()> {
println!("{}", "🚀 Initializing QuicCommit...".bold().cyan());
println!("{}", "🚀 Initializing QuiCommit...".bold().cyan());
let config_path = crate::config::AppConfig::default_path()?;
@@ -57,7 +57,7 @@ impl InitCommand {
manager.save()?;
println!("{}", "✅ QuicCommit initialized successfully!".bold().green());
println!("{}", "✅ QuiCommit initialized successfully!".bold().green());
println!("\nConfig file: {}", config_path.display());
println!("\nNext steps:");
println!(" 1. Create a profile: {}", "quicommit profile add".cyan());


@@ -220,6 +220,72 @@ impl ConfigManager {
self.modified = true;
}
/// Get Kimi API key
pub fn kimi_api_key(&self) -> Option<&String> {
self.config.llm.kimi.api_key.as_ref()
}
/// Set Kimi API key
pub fn set_kimi_api_key(&mut self, key: String) {
self.config.llm.kimi.api_key = Some(key);
self.modified = true;
}
/// Get Kimi base URL
pub fn kimi_base_url(&self) -> &str {
&self.config.llm.kimi.base_url
}
/// Set Kimi base URL
pub fn set_kimi_base_url(&mut self, url: String) {
self.config.llm.kimi.base_url = url;
self.modified = true;
}
/// Get DeepSeek API key
pub fn deepseek_api_key(&self) -> Option<&String> {
self.config.llm.deepseek.api_key.as_ref()
}
/// Set DeepSeek API key
pub fn set_deepseek_api_key(&mut self, key: String) {
self.config.llm.deepseek.api_key = Some(key);
self.modified = true;
}
/// Get DeepSeek base URL
pub fn deepseek_base_url(&self) -> &str {
&self.config.llm.deepseek.base_url
}
/// Set DeepSeek base URL
pub fn set_deepseek_base_url(&mut self, url: String) {
self.config.llm.deepseek.base_url = url;
self.modified = true;
}
/// Get OpenRouter API key
pub fn openrouter_api_key(&self) -> Option<&String> {
self.config.llm.openrouter.api_key.as_ref()
}
/// Set OpenRouter API key
pub fn set_openrouter_api_key(&mut self, key: String) {
self.config.llm.openrouter.api_key = Some(key);
self.modified = true;
}
/// Get OpenRouter base URL
pub fn openrouter_base_url(&self) -> &str {
&self.config.llm.openrouter.base_url
}
/// Set OpenRouter base URL
pub fn set_openrouter_base_url(&mut self, url: String) {
self.config.llm.openrouter.base_url = url;
self.modified = true;
}
// Commit configuration
/// Get commit format
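A minimal sketch of how these accessors compose from a caller (the key value is a placeholder; the setters flag the config as modified, and save() then persists it):

let mut manager = ConfigManager::new()?;
manager.set_kimi_api_key("sk-example".to_string());
manager.set_kimi_base_url("https://api.moonshot.cn/v1".to_string());
assert_eq!(manager.kimi_base_url(), "https://api.moonshot.cn/v1");
manager.save()?;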


@@ -89,6 +89,18 @@ pub struct LlmConfig {
#[serde(default)]
pub anthropic: AnthropicConfig,
/// Kimi (Moonshot AI) configuration
#[serde(default)]
pub kimi: KimiConfig,
/// DeepSeek configuration
#[serde(default)]
pub deepseek: DeepSeekConfig,
/// OpenRouter configuration
#[serde(default)]
pub openrouter: OpenRouterConfig,
/// Custom API configuration
#[serde(default)]
pub custom: Option<CustomLlmConfig>,
@@ -113,6 +125,9 @@ impl Default for LlmConfig {
openai: OpenAiConfig::default(),
ollama: OllamaConfig::default(),
anthropic: AnthropicConfig::default(),
kimi: KimiConfig::default(),
deepseek: DeepSeekConfig::default(),
openrouter: OpenRouterConfig::default(),
custom: None,
max_tokens: default_max_tokens(),
temperature: default_temperature(),
@@ -187,6 +202,81 @@ impl Default for AnthropicConfig {
}
}
/// Kimi (Moonshot AI) configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KimiConfig {
/// API key
pub api_key: Option<String>,
/// Model to use
#[serde(default = "default_kimi_model")]
pub model: String,
/// API base URL (for custom endpoints)
#[serde(default = "default_kimi_base_url")]
pub base_url: String,
}
impl Default for KimiConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_kimi_model(),
base_url: default_kimi_base_url(),
}
}
}
/// DeepSeek configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepSeekConfig {
/// API key
pub api_key: Option<String>,
/// Model to use
#[serde(default = "default_deepseek_model")]
pub model: String,
/// API base URL (for custom endpoints)
#[serde(default = "default_deepseek_base_url")]
pub base_url: String,
}
impl Default for DeepSeekConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_deepseek_model(),
base_url: default_deepseek_base_url(),
}
}
}
/// OpenRouter configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenRouterConfig {
/// API key
pub api_key: Option<String>,
/// Model to use
#[serde(default = "default_openrouter_model")]
pub model: String,
/// API base URL (for custom endpoints)
#[serde(default = "default_openrouter_base_url")]
pub base_url: String,
}
impl Default for OpenRouterConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_openrouter_model(),
base_url: default_openrouter_base_url(),
}
}
}
/// Custom LLM API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomLlmConfig {
@@ -423,7 +513,7 @@ fn default_max_tokens() -> u32 {
}
fn default_temperature() -> f32 {
- 0.7
+ 0.6
}
fn default_timeout() -> u64 {
@@ -450,6 +540,30 @@ fn default_anthropic_model() -> String {
"claude-3-sonnet-20240229".to_string()
}
fn default_kimi_model() -> String {
"moonshot-v1-8k".to_string()
}
fn default_kimi_base_url() -> String {
"https://api.moonshot.cn/v1".to_string()
}
fn default_deepseek_model() -> String {
"deepseek-chat".to_string()
}
fn default_deepseek_base_url() -> String {
"https://api.deepseek.com/v1".to_string()
}
fn default_openrouter_model() -> String {
"openai/gpt-3.5-turbo".to_string()
}
fn default_openrouter_base_url() -> String {
"https://openrouter.ai/api/v1".to_string()
}
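Because each field carries a serde default, a provider table that is absent or empty deserializes to the defaults above. A minimal sketch (assuming serde_json is available as a dev-dependency; the config's on-disk format may differ):

#[test]
fn kimi_config_defaults_fill_missing_fields() {
    let cfg: KimiConfig = serde_json::from_str("{}").unwrap();
    assert_eq!(cfg.model, "moonshot-v1-8k");
    assert_eq!(cfg.base_url, "https://api.moonshot.cn/v1");
    assert!(cfg.api_key.is_none());
}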
fn default_commit_format() -> CommitFormat {
CommitFormat::Conventional
}

src/llm/deepseek.rs (new file, 215 lines)

@@ -0,0 +1,215 @@
use super::{create_http_client, LlmProvider};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// DeepSeek API client
pub struct DeepSeekClient {
base_url: String,
api_key: String,
model: String,
client: reqwest::Client,
}
#[derive(Debug, Serialize)]
struct ChatCompletionRequest {
model: String,
messages: Vec<Message>,
#[serde(skip_serializing_if = "Option::is_none")]
max_tokens: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
temperature: Option<f32>,
stream: bool,
}
#[derive(Debug, Serialize, Deserialize)]
struct Message {
role: String,
content: String,
}
#[derive(Debug, Deserialize)]
struct ChatCompletionResponse {
choices: Vec<Choice>,
}
#[derive(Debug, Deserialize)]
struct Choice {
message: Message,
}
#[derive(Debug, Deserialize)]
struct ErrorResponse {
error: ApiError,
}
#[derive(Debug, Deserialize)]
struct ApiError {
message: String,
#[serde(rename = "type")]
error_type: String,
}
impl DeepSeekClient {
/// Create new DeepSeek client
pub fn new(api_key: &str, model: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: "https://api.deepseek.com/v1".to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Create with custom base URL
pub fn with_base_url(api_key: &str, model: &str, base_url: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: base_url.trim_end_matches('/').to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Set timeout
pub fn with_timeout(mut self, timeout: Duration) -> Result<Self> {
self.client = create_http_client(timeout)?;
Ok(self)
}
/// Validate API key
pub async fn validate_key(&self) -> Result<bool> {
let url = format!("{}/models", self.base_url);
let response = self.client
.get(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.send()
.await
.context("Failed to validate DeepSeek API key")?;
if response.status().is_success() {
Ok(true)
} else if response.status().as_u16() == 401 {
Ok(false)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
bail!("DeepSeek API error: {} - {}", status, text)
}
}
}
#[async_trait]
impl LlmProvider for DeepSeekClient {
async fn generate(&self, prompt: &str) -> Result<String> {
let messages = vec![
Message {
role: "user".to_string(),
content: prompt.to_string(),
},
];
self.chat_completion(messages).await
}
async fn generate_with_system(&self, system: &str, user: &str) -> Result<String> {
let mut messages = vec![];
if !system.is_empty() {
messages.push(Message {
role: "system".to_string(),
content: system.to_string(),
});
}
messages.push(Message {
role: "user".to_string(),
content: user.to_string(),
});
self.chat_completion(messages).await
}
async fn is_available(&self) -> bool {
self.validate_key().await.unwrap_or(false)
}
fn name(&self) -> &str {
"deepseek"
}
}
impl DeepSeekClient {
async fn chat_completion(&self, messages: Vec<Message>) -> Result<String> {
let url = format!("{}/chat/completions", self.base_url);
let request = ChatCompletionRequest {
model: self.model.clone(),
messages,
// NOTE: sampling parameters are fixed here rather than taken from LlmConfig
max_tokens: Some(500),
temperature: Some(0.7),
stream: false,
};
let response = self.client
.post(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.header("Content-Type", "application/json")
.json(&request)
.send()
.await
.context("Failed to send request to DeepSeek")?;
let status = response.status();
if !status.is_success() {
let text = response.text().await.unwrap_or_default();
// Try to parse error
if let Ok(error) = serde_json::from_str::<ErrorResponse>(&text) {
bail!("DeepSeek API error: {} ({})", error.error.message, error.error.error_type);
}
bail!("DeepSeek API error: {} - {}", status, text);
}
let result: ChatCompletionResponse = response
.json()
.await
.context("Failed to parse DeepSeek response")?;
result.choices
.into_iter()
.next()
.map(|c| c.message.content.trim().to_string())
.ok_or_else(|| anyhow::anyhow!("No response from DeepSeek"))
}
}
/// Available DeepSeek models
pub const DEEPSEEK_MODELS: &[&str] = &[
"deepseek-chat",
"deepseek-coder",
];
/// Check if a model name is valid
pub fn is_valid_model(model: &str) -> bool {
DEEPSEEK_MODELS.contains(&model)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_model_validation() {
assert!(is_valid_model("deepseek-chat"));
assert!(!is_valid_model("invalid-model"));
}
}
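A minimal usage sketch for the client (the key is a placeholder; requires an async runtime, with LlmProvider in scope for the trait methods):

let client = DeepSeekClient::new("sk-example", "deepseek-chat")?
    .with_timeout(Duration::from_secs(30))?;
if client.is_available().await {
    let msg = client.generate_with_system(
        "You write conventional commit messages.",
        "Summarize: add Kimi, DeepSeek, and OpenRouter providers",
    ).await?;
    println!("{msg}");
}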

src/llm/kimi.rs (new file, 216 lines)

@@ -0,0 +1,216 @@
use super::{create_http_client, LlmProvider};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// Kimi API client (Moonshot AI)
pub struct KimiClient {
base_url: String,
api_key: String,
model: String,
client: reqwest::Client,
}
#[derive(Debug, Serialize)]
struct ChatCompletionRequest {
model: String,
messages: Vec<Message>,
#[serde(skip_serializing_if = "Option::is_none")]
max_tokens: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
temperature: Option<f32>,
stream: bool,
}
#[derive(Debug, Serialize, Deserialize)]
struct Message {
role: String,
content: String,
}
#[derive(Debug, Deserialize)]
struct ChatCompletionResponse {
choices: Vec<Choice>,
}
#[derive(Debug, Deserialize)]
struct Choice {
message: Message,
}
#[derive(Debug, Deserialize)]
struct ErrorResponse {
error: ApiError,
}
#[derive(Debug, Deserialize)]
struct ApiError {
message: String,
#[serde(rename = "type")]
error_type: String,
}
impl KimiClient {
/// Create new Kimi client
pub fn new(api_key: &str, model: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: "https://api.moonshot.cn/v1".to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Create with custom base URL
pub fn with_base_url(api_key: &str, model: &str, base_url: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: base_url.trim_end_matches('/').to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Set timeout
pub fn with_timeout(mut self, timeout: Duration) -> Result<Self> {
self.client = create_http_client(timeout)?;
Ok(self)
}
/// Validate API key
pub async fn validate_key(&self) -> Result<bool> {
let url = format!("{}/models", self.base_url);
let response = self.client
.get(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.send()
.await
.context("Failed to validate Kimi API key")?;
if response.status().is_success() {
Ok(true)
} else if response.status().as_u16() == 401 {
Ok(false)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
bail!("Kimi API error: {} - {}", status, text)
}
}
}
#[async_trait]
impl LlmProvider for KimiClient {
async fn generate(&self, prompt: &str) -> Result<String> {
let messages = vec![
Message {
role: "user".to_string(),
content: prompt.to_string(),
},
];
self.chat_completion(messages).await
}
async fn generate_with_system(&self, system: &str, user: &str) -> Result<String> {
let mut messages = vec![];
if !system.is_empty() {
messages.push(Message {
role: "system".to_string(),
content: system.to_string(),
});
}
messages.push(Message {
role: "user".to_string(),
content: user.to_string(),
});
self.chat_completion(messages).await
}
async fn is_available(&self) -> bool {
self.validate_key().await.unwrap_or(false)
}
fn name(&self) -> &str {
"kimi"
}
}
impl KimiClient {
async fn chat_completion(&self, messages: Vec<Message>) -> Result<String> {
let url = format!("{}/chat/completions", self.base_url);
let request = ChatCompletionRequest {
model: self.model.clone(),
messages,
max_tokens: Some(500),
temperature: Some(0.7),
stream: false,
};
let response = self.client
.post(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.header("Content-Type", "application/json")
.json(&request)
.send()
.await
.context("Failed to send request to Kimi")?;
let status = response.status();
if !status.is_success() {
let text = response.text().await.unwrap_or_default();
// Try to parse error
if let Ok(error) = serde_json::from_str::<ErrorResponse>(&text) {
bail!("Kimi API error: {} ({})", error.error.message, error.error.error_type);
}
bail!("Kimi API error: {} - {}", status, text);
}
let result: ChatCompletionResponse = response
.json()
.await
.context("Failed to parse Kimi response")?;
result.choices
.into_iter()
.next()
.map(|c| c.message.content.trim().to_string())
.ok_or_else(|| anyhow::anyhow!("No response from Kimi"))
}
}
/// Available Kimi models
pub const KIMI_MODELS: &[&str] = &[
"moonshot-v1-8k",
"moonshot-v1-32k",
"moonshot-v1-128k",
];
/// Check if a model name is valid
pub fn is_valid_model(model: &str) -> bool {
KIMI_MODELS.contains(&model)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_model_validation() {
assert!(is_valid_model("moonshot-v1-8k"));
assert!(!is_valid_model("invalid-model"));
}
}


@@ -6,10 +6,16 @@ use std::time::Duration;
pub mod ollama;
pub mod openai;
pub mod anthropic;
pub mod kimi;
pub mod deepseek;
pub mod openrouter;
pub use ollama::OllamaClient;
pub use openai::OpenAiClient;
pub use anthropic::AnthropicClient;
pub use kimi::KimiClient;
pub use deepseek::DeepSeekClient;
pub use openrouter::OpenRouterClient;
/// LLM provider trait
#[async_trait]
@@ -77,6 +83,21 @@ impl LlmClient {
.ok_or_else(|| anyhow::anyhow!("Anthropic API key not configured"))?;
Box::new(AnthropicClient::new(api_key, &config.anthropic.model)?)
}
"kimi" => {
let api_key = config.kimi.api_key.as_ref()
.ok_or_else(|| anyhow::anyhow!("Kimi API key not configured"))?;
Box::new(KimiClient::with_base_url(api_key, &config.kimi.model, &config.kimi.base_url)?)
}
"deepseek" => {
let api_key = config.deepseek.api_key.as_ref()
.ok_or_else(|| anyhow::anyhow!("DeepSeek API key not configured"))?;
Box::new(DeepSeekClient::with_base_url(api_key, &config.deepseek.model, &config.deepseek.base_url)?)
}
"openrouter" => {
let api_key = config.openrouter.api_key.as_ref()
.ok_or_else(|| anyhow::anyhow!("OpenRouter API key not configured"))?;
Box::new(OpenRouterClient::with_base_url(api_key, &config.openrouter.model, &config.openrouter.base_url)?)
}
_ => bail!("Unknown LLM provider: {}", config.provider),
};
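Since every client is boxed behind the LlmProvider trait, callers stay provider-agnostic; a hedged sketch of such a call site (the function name is hypothetical):

async fn generate_commit_message(provider: &dyn LlmProvider, diff: &str) -> anyhow::Result<String> {
    provider.generate_with_system("You write conventional commit messages.", diff).await
}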

src/llm/openrouter.rs (new file, 229 lines)

@@ -0,0 +1,229 @@
use super::{create_http_client, LlmProvider};
use anyhow::{bail, Context, Result};
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::time::Duration;
/// OpenRouter API client
pub struct OpenRouterClient {
base_url: String,
api_key: String,
model: String,
client: reqwest::Client,
}
#[derive(Debug, Serialize)]
struct ChatCompletionRequest {
model: String,
messages: Vec<Message>,
#[serde(skip_serializing_if = "Option::is_none")]
max_tokens: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
temperature: Option<f32>,
stream: bool,
}
#[derive(Debug, Serialize, Deserialize)]
struct Message {
role: String,
content: String,
}
#[derive(Debug, Deserialize)]
struct ChatCompletionResponse {
choices: Vec<Choice>,
}
#[derive(Debug, Deserialize)]
struct Choice {
message: Message,
}
#[derive(Debug, Deserialize)]
struct ErrorResponse {
error: ApiError,
}
#[derive(Debug, Deserialize)]
struct ApiError {
message: String,
#[serde(rename = "type")]
error_type: String,
}
impl OpenRouterClient {
/// Create new OpenRouter client
pub fn new(api_key: &str, model: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: "https://openrouter.ai/api/v1".to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Create with custom base URL
pub fn with_base_url(api_key: &str, model: &str, base_url: &str) -> Result<Self> {
let client = create_http_client(Duration::from_secs(60))?;
Ok(Self {
base_url: base_url.trim_end_matches('/').to_string(),
api_key: api_key.to_string(),
model: model.to_string(),
client,
})
}
/// Set timeout
pub fn with_timeout(mut self, timeout: Duration) -> Result<Self> {
self.client = create_http_client(timeout)?;
Ok(self)
}
/// Validate API key
pub async fn validate_key(&self) -> Result<bool> {
let url = format!("{}/models", self.base_url);
let response = self.client
.get(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.header("HTTP-Referer", "https://quicommit.dev")
.header("X-Title", "QuiCommit")
.send()
.await
.context("Failed to validate OpenRouter API key")?;
if response.status().is_success() {
Ok(true)
} else if response.status().as_u16() == 401 {
Ok(false)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
bail!("OpenRouter API error: {} - {}", status, text)
}
}
}
#[async_trait]
impl LlmProvider for OpenRouterClient {
async fn generate(&self, prompt: &str) -> Result<String> {
let messages = vec![
Message {
role: "user".to_string(),
content: prompt.to_string(),
},
];
self.chat_completion(messages).await
}
async fn generate_with_system(&self, system: &str, user: &str) -> Result<String> {
let mut messages = vec![];
if !system.is_empty() {
messages.push(Message {
role: "system".to_string(),
content: system.to_string(),
});
}
messages.push(Message {
role: "user".to_string(),
content: user.to_string(),
});
self.chat_completion(messages).await
}
async fn is_available(&self) -> bool {
self.validate_key().await.unwrap_or(false)
}
fn name(&self) -> &str {
"openrouter"
}
}
impl OpenRouterClient {
async fn chat_completion(&self, messages: Vec<Message>) -> Result<String> {
let url = format!("{}/chat/completions", self.base_url);
let request = ChatCompletionRequest {
model: self.model.clone(),
messages,
max_tokens: Some(500),
temperature: Some(0.7),
stream: false,
};
let response = self.client
.post(&url)
.header("Authorization", format!("Bearer {}", self.api_key))
.header("Content-Type", "application/json")
.header("HTTP-Referer", "https://quicommit.dev")
.header("X-Title", "QuiCommit")
.json(&request)
.send()
.await
.context("Failed to send request to OpenRouter")?;
let status = response.status();
if !status.is_success() {
let text = response.text().await.unwrap_or_default();
// Try to parse error
if let Ok(error) = serde_json::from_str::<ErrorResponse>(&text) {
bail!("OpenRouter API error: {} ({})", error.error.message, error.error.error_type);
}
bail!("OpenRouter API error: {} - {}", status, text);
}
let result: ChatCompletionResponse = response
.json()
.await
.context("Failed to parse OpenRouter response")?;
result.choices
.into_iter()
.next()
.map(|c| c.message.content.trim().to_string())
.ok_or_else(|| anyhow::anyhow!("No response from OpenRouter"))
}
}
/// Popular OpenRouter models
pub const OPENROUTER_MODELS: &[&str] = &[
"openai/gpt-3.5-turbo",
"openai/gpt-4",
"openai/gpt-4-turbo",
"anthropic/claude-3-opus",
"anthropic/claude-3-sonnet",
"anthropic/claude-3-haiku",
"google/gemini-pro",
"meta-llama/llama-2-70b-chat",
"mistralai/mixtral-8x7b-instruct",
"01-ai/yi-34b-chat",
];
/// Check if a model name is valid
pub fn is_valid_model(_model: &str) -> bool {
    // OpenRouter routes to many upstream models, so accept any model name;
    // OPENROUTER_MODELS above is only a list of popular suggestions.
    true
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_model_validation() {
assert!(is_valid_model("openai/gpt-4"));
assert!(is_valid_model("custom/model"));
}
}


@@ -14,7 +14,7 @@ use commands::{
init::InitCommand, profile::ProfileCommand, tag::TagCommand,
};
- /// QuicCommit - AI-powered Git assistant
+ /// QuiCommit - AI-powered Git assistant
///
/// A powerful tool that helps you generate conventional commits, tags, and changelogs
/// using AI (LLM APIs or local Ollama models). Manage multiple Git profiles for different