From da85fc94b1a6cfcd50144787164bb6287b0fe28d Mon Sep 17 00:00:00 2001 From: SidneyZhang Date: Thu, 12 Mar 2026 17:42:41 +0800 Subject: [PATCH] =?UTF-8?q?feat(keyring):=20=E9=9B=86=E6=88=90=E7=B3=BB?= =?UTF-8?q?=E7=BB=9F=E5=AF=86=E9=92=A5=E7=8E=AF=E5=AE=89=E5=85=A8=E5=AD=98?= =?UTF-8?q?=E5=82=A8=20API=20key?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- Cargo.toml | 3 + Cargo.toml.test | 7 + src/commands/changelog.rs | 3 +- src/commands/commit.rs | 9 +- src/commands/config.rs | 914 +++++++++++++++---------------------- src/commands/init.rs | 129 ++++-- src/commands/tag.rs | 4 +- src/config/manager.rs | 183 +++++--- src/config/mod.rs | 262 +---------- src/generator/mod.rs | 10 +- src/llm/mod.rs | 53 ++- src/utils/keyring.rs | 219 +++++++++ src/utils/mod.rs | 1 + test-keyring/Cargo.toml | 7 + test-keyring/src/main.rs | 18 + test_keyring.rs | 18 + tests/integration_tests.rs | 174 +++---- 17 files changed, 990 insertions(+), 1024 deletions(-) create mode 100644 Cargo.toml.test create mode 100644 src/utils/keyring.rs create mode 100644 test-keyring/Cargo.toml create mode 100644 test-keyring/src/main.rs create mode 100644 test_keyring.rs diff --git a/Cargo.toml b/Cargo.toml index dee549a..503fc9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -66,6 +66,9 @@ argon2 = "0.5" rand = "0.8" base64 = "0.22" +# System keyring for secure API key storage +keyring = { version = "3.6.3", features = ["apple-native", "windows-native", "sync-secret-service"] } + # Interactive editor edit = "0.1" diff --git a/Cargo.toml.test b/Cargo.toml.test new file mode 100644 index 0000000..7e37c14 --- /dev/null +++ b/Cargo.toml.test @@ -0,0 +1,7 @@ +[package] +name = "test-keyring" +version = "0.1.0" +edition = "2024" + +[dependencies] +keyring = "3" \ No newline at end of file diff --git a/src/commands/changelog.rs b/src/commands/changelog.rs index 260839b..9369950 100644 --- a/src/commands/changelog.rs +++ b/src/commands/changelog.rs @@ -204,12 
+204,11 @@ impl ChangelogCommand { messages: &Messages, ) -> Result { let manager = ConfigManager::new()?; - let config = manager.config(); let language = manager.get_language().unwrap_or(Language::English); println!("{}", messages.ai_generating_changelog()); - let generator = ContentGenerator::new(&config.llm).await?; + let generator = ContentGenerator::new(&manager).await?; generator.generate_changelog_entry(version, commits, language).await } diff --git a/src/commands/commit.rs b/src/commands/commit.rs index 3a1ea29..e284298 100644 --- a/src/commands/commit.rs +++ b/src/commands/commit.rs @@ -257,22 +257,17 @@ impl CommitCommand { async fn generate_commit(&self, repo: &GitRepo, format: CommitFormat, messages: &Messages) -> Result { let manager = ConfigManager::new()?; - let config = manager.config(); - // Check if LLM is configured - let generator = ContentGenerator::new(&config.llm).await + let generator = ContentGenerator::new(&manager).await .context("Failed to initialize LLM. Use --manual for manual commit.")?; println!("{}", messages.ai_analyzing()); - let language_str = &config.language.output_language; - let language = Language::from_str(language_str).unwrap_or(Language::English); + let language = manager.get_language().unwrap_or(Language::English); let generated = if self.yes { - // Non-interactive mode: generate directly generator.generate_commit_from_repo(repo, format, language).await? } else { - // Interactive mode: allow user to review and regenerate generator.generate_commit_interactive(repo, format, language).await? 
}; diff --git a/src/commands/config.rs b/src/commands/config.rs index c8ae70d..525f77a 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -6,6 +6,7 @@ use std::path::PathBuf; use crate::config::{Language, manager::ConfigManager}; use crate::config::CommitFormat; +use crate::utils::keyring::{get_supported_providers, get_default_model, get_default_base_url, provider_needs_api_key}; /// Mask API key with asterisks for security fn mask_api_key(key: Option<&str>) -> String { @@ -53,82 +54,33 @@ enum ConfigSubcommand { key: String, }, - /// Set LLM provider + /// Configure LLM provider (interactive) SetLlm { - /// Provider (ollama, openai, anthropic) + /// Provider (ollama, openai, anthropic, kimi, deepseek, openrouter) #[arg(value_name = "PROVIDER")] provider: Option, - }, - - /// Set OpenAI API key - SetOpenAiKey { - /// API key - key: String, - }, - - /// Set Anthropic API key - SetAnthropicKey { - /// API key - key: String, - }, - - /// Set Kimi API key - SetKimiKey { - /// API key - key: String, - }, - - /// Set DeepSeek API key - SetDeepSeekKey { - /// API key - key: String, - }, - - /// Set OpenRouter API key - SetOpenRouterKey { - /// API key - key: String, - }, - - /// Configure Ollama settings - SetOllama { - /// Ollama server URL - #[arg(short, long)] - url: Option, + /// Model name #[arg(short, long)] model: Option, - }, - - /// Configure Kimi settings - SetKimi { - /// API base URL (for custom endpoints) + + /// API base URL (optional) #[arg(short, long)] base_url: Option, - /// Model name - #[arg(short, long)] - model: Option, + + /// API key (will be stored in system keyring) + #[arg(short = 'k', long)] + api_key: Option, }, - /// Configure DeepSeek settings - SetDeepSeek { - /// API base URL (for custom endpoints) - #[arg(short, long)] - base_url: Option, - /// Model name - #[arg(short, long)] - model: Option, + /// Set API key for current provider (stored in system keyring) + SetApiKey { + /// API key + key: String, }, - /// Configure 
OpenRouter settings - SetOpenRouter { - /// API base URL (for custom endpoints) - #[arg(short, long)] - base_url: Option, - /// Model name - #[arg(short, long)] - model: Option, - }, + /// Delete API key from system keyring + DeleteApiKey, /// Set commit format SetCommitFormat { @@ -195,6 +147,9 @@ enum ConfigSubcommand { /// Show config file path Path, + + /// Check keyring availability + CheckKeyring, } impl ConfigCommand { @@ -205,16 +160,11 @@ impl ConfigCommand { Some(ConfigSubcommand::Edit) => self.edit_config(&config_path).await, Some(ConfigSubcommand::Set { key, value }) => self.set_value(key, value, &config_path).await, Some(ConfigSubcommand::Get { key }) => self.get_value(key, &config_path).await, - Some(ConfigSubcommand::SetLlm { provider }) => self.set_llm(provider.as_deref(), &config_path).await, - Some(ConfigSubcommand::SetOpenAiKey { key }) => self.set_openai_key(key, &config_path).await, - Some(ConfigSubcommand::SetAnthropicKey { key }) => self.set_anthropic_key(key, &config_path).await, - Some(ConfigSubcommand::SetKimiKey { key }) => self.set_kimi_key(key, &config_path).await, - Some(ConfigSubcommand::SetDeepSeekKey { key }) => self.set_deepseek_key(key, &config_path).await, - Some(ConfigSubcommand::SetOpenRouterKey { key }) => self.set_openrouter_key(key, &config_path).await, - Some(ConfigSubcommand::SetOllama { url, model }) => self.set_ollama(url.as_deref(), model.as_deref(), &config_path).await, - Some(ConfigSubcommand::SetKimi { base_url, model }) => self.set_kimi(base_url.as_deref(), model.as_deref(), &config_path).await, - Some(ConfigSubcommand::SetDeepSeek { base_url, model }) => self.set_deepseek(base_url.as_deref(), model.as_deref(), &config_path).await, - Some(ConfigSubcommand::SetOpenRouter { base_url, model }) => self.set_openrouter(base_url.as_deref(), model.as_deref(), &config_path).await, + Some(ConfigSubcommand::SetLlm { provider, model, base_url, api_key }) => { + self.set_llm(provider.as_deref(), model.as_deref(), 
base_url.as_deref(), api_key.as_deref(), &config_path).await + } + Some(ConfigSubcommand::SetApiKey { key }) => self.set_api_key(key, &config_path).await, + Some(ConfigSubcommand::DeleteApiKey) => self.delete_api_key(&config_path).await, Some(ConfigSubcommand::SetCommitFormat { format }) => self.set_commit_format(format, &config_path).await, Some(ConfigSubcommand::SetVersionPrefix { prefix }) => self.set_version_prefix(prefix, &config_path).await, Some(ConfigSubcommand::SetChangelogPath { path }) => self.set_changelog_path(path, &config_path).await, @@ -227,6 +177,7 @@ impl ConfigCommand { Some(ConfigSubcommand::ListModels) => self.list_models(&config_path).await, Some(ConfigSubcommand::TestLlm) => self.test_llm(&config_path).await, Some(ConfigSubcommand::Path) => self.show_path(&config_path).await, + Some(ConfigSubcommand::CheckKeyring) => self.check_keyring(&config_path).await, None => self.show_config(&config_path).await, } } @@ -244,6 +195,39 @@ impl ConfigCommand { Ok(()) } + async fn check_keyring(&self, config_path: &Option) -> Result<()> { + let manager = self.get_manager(config_path)?; + let keyring = manager.keyring(); + + println!("{}", "\nKeyring Status".bold()); + println!("{}", "─".repeat(40)); + + if keyring.is_available() { + println!("{} Keyring is available", "βœ“".green()); + println!(" {}", keyring.get_status_message()); + } else { + println!("{} Keyring is not available", "βœ—".red()); + println!(" {}", keyring.get_status_message()); + } + + println!("\n{}", "Environment Variables:".bold()); + if let Ok(key) = std::env::var("QUICOMMIT_API_KEY") { + println!(" QUICOMMIT_API_KEY: {}", mask_api_key(Some(&key))); + } else { + println!(" QUICOMMIT_API_KEY: {}", "not set".dimmed()); + } + + let provider = manager.llm_provider(); + let provider_env = format!("QUICOMMIT_{}_API_KEY", provider.to_uppercase()); + if let Ok(key) = std::env::var(&provider_env) { + println!(" {}: {}", provider_env, mask_api_key(Some(&key))); + } else { + println!(" {}: {}", 
provider_env, "not set".dimmed()); + } + + Ok(()) + } + async fn show_config(&self, config_path: &Option) -> Result<()> { let manager = self.get_manager(config_path)?; let config = manager.config(); @@ -259,42 +243,18 @@ impl ConfigCommand { println!("\n{}", "LLM Configuration:".bold()); println!(" Provider: {}", config.llm.provider.cyan()); + println!(" Model: {}", config.llm.model.cyan()); + + let base_url = manager.llm_base_url(); + println!(" Base URL: {}", base_url); + + let api_key = manager.get_api_key(); + println!(" API key: {}", mask_api_key(api_key.as_deref())); + println!(" Max tokens: {}", config.llm.max_tokens); println!(" Temperature: {}", config.llm.temperature); println!(" Timeout: {}s", config.llm.timeout); - match config.llm.provider.as_str() { - "ollama" => { - println!(" URL: {}", config.llm.ollama.url); - println!(" Model: {}", config.llm.ollama.model.cyan()); - } - "openai" => { - println!(" Model: {}", config.llm.openai.model.cyan()); - println!(" Base URL: {}", config.llm.openai.base_url); - println!(" API key: {}", mask_api_key(config.llm.openai.api_key.as_deref())); - } - "anthropic" => { - println!(" Model: {}", config.llm.anthropic.model.cyan()); - println!(" API key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref())); - } - "kimi" => { - println!(" Model: {}", config.llm.kimi.model.cyan()); - println!(" Base URL: {}", config.llm.kimi.base_url); - println!(" API key: {}", mask_api_key(config.llm.kimi.api_key.as_deref())); - } - "deepseek" => { - println!(" Model: {}", config.llm.deepseek.model.cyan()); - println!(" Base URL: {}", config.llm.deepseek.base_url); - println!(" API key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref())); - } - "openrouter" => { - println!(" Model: {}", config.llm.openrouter.model.cyan()); - println!(" Base URL: {}", config.llm.openrouter.base_url); - println!(" API key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref())); - } - _ => {} - } - println!("\n{}", "Commit 
Configuration:".bold()); println!(" Format: {}", config.commit.format.to_string().cyan()); println!(" Auto-generate: {}", if config.commit.auto_generate { "yes".green() } else { "no".red() }); @@ -323,7 +283,6 @@ impl ConfigCommand { Ok(()) } - /// List all configuration information with masked API keys async fn list_config(&self, config_path: &Option) -> Result<()> { let manager = self.get_manager(config_path)?; let config = manager.config(); @@ -340,46 +299,13 @@ impl ConfigCommand { println!("\n{}", "πŸ€– LLM Configuration:".bold().blue()); println!(" Provider: {}", config.llm.provider.cyan()); + println!(" Model: {}", config.llm.model.cyan()); + println!(" Base URL: {}", manager.llm_base_url()); + println!(" API Key: {}", mask_api_key(manager.get_api_key().as_deref())); println!(" Max tokens: {}", config.llm.max_tokens); println!(" Temperature: {}", config.llm.temperature); println!(" Timeout: {}s", config.llm.timeout); - println!("\n{}", " LLM Provider Details:".dimmed()); - - // OpenAI - println!(" πŸ”Ή OpenAI:"); - println!(" Model: {}", config.llm.openai.model.cyan()); - println!(" Base URL: {}", config.llm.openai.base_url); - println!(" API Key: {}", mask_api_key(config.llm.openai.api_key.as_deref())); - - // Anthropic - println!(" πŸ”Ή Anthropic:"); - println!(" Model: {}", config.llm.anthropic.model.cyan()); - println!(" API Key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref())); - - // Kimi - println!(" πŸ”Ή Kimi (Moonshot AI):"); - println!(" Model: {}", config.llm.kimi.model.cyan()); - println!(" Base URL: {}", config.llm.kimi.base_url); - println!(" API Key: {}", mask_api_key(config.llm.kimi.api_key.as_deref())); - - // DeepSeek - println!(" πŸ”Ή DeepSeek:"); - println!(" Model: {}", config.llm.deepseek.model.cyan()); - println!(" Base URL: {}", config.llm.deepseek.base_url); - println!(" API Key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref())); - - // OpenRouter - println!(" πŸ”Ή OpenRouter:"); - println!(" Model: {}", 
config.llm.openrouter.model.cyan()); - println!(" Base URL: {}", config.llm.openrouter.base_url); - println!(" API Key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref())); - - // Ollama - println!(" πŸ”Ή Ollama:"); - println!(" URL: {}", config.llm.ollama.url); - println!(" Model: {}", config.llm.ollama.model.cyan()); - println!("\n{}", "πŸ“ Commit Configuration:".bold().blue()); println!(" Format: {}", config.commit.format.to_string().cyan()); println!(" Auto-generate: {}", if config.commit.auto_generate { "βœ“ yes".green() } else { "βœ— no".red() }); @@ -418,6 +344,16 @@ impl ConfigCommand { println!("\n{}", "πŸ”’ Security:".bold().blue()); println!(" Encrypt sensitive: {}", if config.encrypt_sensitive { "βœ“ yes".green() } else { "βœ— no".red() }); + + println!("\n{}", "πŸ”‘ Keyring:".bold().blue()); + let keyring = manager.keyring(); + if keyring.is_available() { + println!(" Status: {}", "βœ“ available".green()); + println!(" Backend: {}", keyring.get_status_message()); + } else { + println!(" Status: {}", "βœ— unavailable".red()); + println!(" Note: {}", keyring.get_status_message()); + } Ok(()) } @@ -434,6 +370,8 @@ impl ConfigCommand { match key { "llm.provider" => manager.set_llm_provider(value.to_string()), + "llm.model" => manager.set_llm_model(value.to_string()), + "llm.base_url" => manager.set_llm_base_url(Some(value.to_string())), "llm.max_tokens" => { let tokens: u32 = value.parse()?; manager.config_mut().llm.max_tokens = tokens; @@ -446,6 +384,13 @@ impl ConfigCommand { let timeout: u64 = value.parse()?; manager.config_mut().llm.timeout = timeout; } + "llm.api_key_storage" => { + let valid_values = vec!["keyring", "config", "environment"]; + if !valid_values.contains(&value) { + bail!("Invalid value: {}. 
Use: {}", value, valid_values.join(", ")); + } + manager.config_mut().llm.api_key_storage = value.to_string(); + } "commit.format" => { let format = match value { "conventional" => CommitFormat::Conventional, @@ -473,13 +418,17 @@ impl ConfigCommand { let config = manager.config(); let value = match key { - "llm.provider" => &config.llm.provider, - "llm.max_tokens" => return Ok(println!("{}", config.llm.max_tokens)), - "llm.temperature" => return Ok(println!("{}", config.llm.temperature)), - "llm.timeout" => return Ok(println!("{}", config.llm.timeout)), - "commit.format" => return Ok(println!("{}", config.commit.format)), - "tag.version_prefix" => &config.tag.version_prefix, - "changelog.path" => &config.changelog.path, + "llm.provider" => config.llm.provider.clone(), + "llm.model" => config.llm.model.clone(), + "llm.base_url" => manager.llm_base_url(), + "llm.max_tokens" => config.llm.max_tokens.to_string(), + "llm.temperature" => config.llm.temperature.to_string(), + "llm.timeout" => config.llm.timeout.to_string(), + "llm.api_key_storage" => config.llm.api_key_storage.clone(), + "commit.format" => config.commit.format.to_string(), + "commit.auto_generate" => config.commit.auto_generate.to_string(), + "tag.version_prefix" => config.tag.version_prefix.clone(), + "changelog.path" => config.changelog.path.clone(), _ => bail!("Unknown configuration key: {}", key), }; @@ -487,236 +436,198 @@ impl ConfigCommand { Ok(()) } - async fn set_llm(&self, provider: Option<&str>, config_path: &Option) -> Result<()> { + async fn set_llm(&self, provider: Option<&str>, model: Option<&str>, base_url: Option<&str>, api_key: Option<&str>, config_path: &Option) -> Result<()> { let mut manager = self.get_manager(config_path)?; - let provider = if let Some(p) = provider { + let selected_provider = if let Some(p) = provider { + let providers = get_supported_providers(); + if !providers.contains(&p) { + bail!("Invalid provider: {}. 
Valid options: {}", p, providers.join(", ")); + } p.to_string() } else { - let providers = vec!["ollama", "openai", "anthropic", "kimi", "deepseek", "openrouter"]; - let idx = Select::new() - .with_prompt("Select LLM provider") - .items(&providers) + println!("{}", "Select LLM Provider:".bold()); + let provider_display_names = vec![ + "Ollama (local)", + "OpenAI", + "Anthropic Claude", + "Kimi (Moonshot AI)", + "DeepSeek", + "OpenRouter" + ]; + + let provider_idx = Select::new() + .items(&provider_display_names) .default(0) .interact()?; - providers[idx].to_string() + + let providers = get_supported_providers(); + providers[provider_idx].to_string() }; + + let keyring = manager.keyring(); + let keyring_available = keyring.is_available(); - manager.set_llm_provider(provider.clone()); - - // Configure provider-specific settings - match provider.as_str() { - "openai" => { - let api_key: String = Input::new() - .with_prompt("OpenAI API key") - .interact_text()?; - manager.set_openai_api_key(api_key); - - let model: String = Input::new() - .with_prompt("Model") - .default("gpt-4".to_string()) - .interact_text()?; - manager.config_mut().llm.openai.model = model; - - let base_url: String = Input::new() - .with_prompt("Base URL (optional)") - .default("https://api.openai.com/v1".to_string()) - .interact_text()?; - if base_url != "https://api.openai.com/v1" { - manager.config_mut().llm.openai.base_url = base_url; - } - } - "anthropic" => { - let api_key: String = Input::new() - .with_prompt("Anthropic API key") - .interact_text()?; - manager.set_anthropic_api_key(api_key); - - let model: String = Input::new() - .with_prompt("Model") - .default("claude-3-sonnet-20240229".to_string()) - .interact_text()?; - manager.config_mut().llm.anthropic.model = model; - } - "kimi" => { - let api_key: String = Input::new() - .with_prompt("Kimi API key") - .interact_text()?; - manager.set_kimi_api_key(api_key); - - let model: String = Input::new() - .with_prompt("Model") - 
.default("moonshot-v1-8k".to_string()) - .interact_text()?; - manager.config_mut().llm.kimi.model = model; - - let base_url: String = Input::new() - .with_prompt("Base URL (optional)") - .default("https://api.moonshot.cn/v1".to_string()) - .interact_text()?; - if base_url != "https://api.moonshot.cn/v1" { - manager.set_kimi_base_url(base_url); - } - } - "deepseek" => { - let api_key: String = Input::new() - .with_prompt("DeepSeek API key") - .interact_text()?; - manager.set_deepseek_api_key(api_key); - - let model: String = Input::new() - .with_prompt("Model") - .default("deepseek-chat".to_string()) - .interact_text()?; - manager.config_mut().llm.deepseek.model = model; - - let base_url: String = Input::new() - .with_prompt("Base URL (optional)") - .default("https://api.deepseek.com/v1".to_string()) - .interact_text()?; - if base_url != "https://api.deepseek.com/v1" { - manager.set_deepseek_base_url(base_url); - } - } - "openrouter" => { - let api_key: String = Input::new() - .with_prompt("OpenRouter API key") - .interact_text()?; - manager.set_openrouter_api_key(api_key); - - let model: String = Input::new() - .with_prompt("Model") - .default("openai/gpt-3.5-turbo".to_string()) - .interact_text()?; - manager.config_mut().llm.openrouter.model = model; - - let base_url: String = Input::new() - .with_prompt("Base URL (optional)") - .default("https://openrouter.ai/api/v1".to_string()) - .interact_text()?; - if base_url != "https://openrouter.ai/api/v1" { - manager.set_openrouter_base_url(base_url); - } - } - "ollama" => { + if !keyring_available && provider_needs_api_key(&selected_provider) { + println!("\n{}", "⚠ Keyring is not available on this system.".yellow()); + println!("{}", keyring.get_status_message().yellow()); + } + + let selected_model = if let Some(m) = model { + m.to_string() + } else { + let default_model = get_default_model(&selected_provider); + Input::new() + .with_prompt("Model name") + .default(default_model.to_string()) + .interact_text()? 
+ }; + + let selected_base_url = if let Some(u) = base_url { + Some(u.to_string()) + } else if selected_provider == "ollama" { + let url: String = Input::new() + .with_prompt("Ollama server URL") + .default("http://localhost:11434".to_string()) + .interact_text()?; + Some(url) + } else { + let use_custom = Confirm::new() + .with_prompt("Use custom API base URL?") + .default(false) + .interact()?; + + if use_custom { + let default_url = get_default_base_url(&selected_provider); let url: String = Input::new() - .with_prompt("Ollama URL") - .default("http://localhost:11434".to_string()) + .with_prompt("Base URL") + .default(default_url.to_string()) .interact_text()?; - manager.config_mut().llm.ollama.url = url; - - let model: String = Input::new() - .with_prompt("Model") - .default("llama2".to_string()) - .interact_text()?; - manager.config_mut().llm.ollama.model = model; + Some(url) + } else { + None } - _ => {} + }; + + let selected_api_key = if provider_needs_api_key(&selected_provider) { + if let Some(k) = api_key { + Some(k.to_string()) + } else if keyring_available { + let existing_key = manager.get_api_key(); + if existing_key.is_some() { + let overwrite = Confirm::new() + .with_prompt("API key already exists. 
Update it?") + .default(false) + .interact()?; + + if overwrite { + let key: String = Input::new() + .with_prompt("API key") + .interact_text()?; + Some(key) + } else { + None + } + } else { + let key: String = Input::new() + .with_prompt("API key") + .interact_text()?; + Some(key) + } + } else { + println!("\n{}", "Please set the QUICOMMIT_API_KEY environment variable.".yellow()); + None + } + } else { + None + }; + + manager.set_llm_provider(selected_provider.clone()); + manager.set_llm_model(selected_model); + manager.set_llm_base_url(selected_base_url); + + if let Some(key) = selected_api_key { + manager.set_api_key(&key)?; + println!("\n{} API key stored securely in system keyring", "βœ“".green()); } - + manager.save()?; - println!("{} Set LLM provider to {}", "βœ“".green(), provider.cyan()); + + println!("\n{} LLM configuration updated", "βœ“".green()); + println!(" Provider: {}", manager.llm_provider().cyan()); + println!(" Model: {}", manager.llm_model().cyan()); + println!(" Base URL: {}", manager.llm_base_url()); Ok(()) } - async fn set_openai_key(&self, key: &str, config_path: &Option) -> Result<()> { + async fn set_api_key(&self, key: &str, config_path: &Option) -> Result<()> { let mut manager = self.get_manager(config_path)?; - manager.set_openai_api_key(key.to_string()); - manager.save()?; - println!("{} OpenAI API key set", "βœ“".green()); + + let provider = manager.llm_provider().to_string(); + if !provider_needs_api_key(&provider) { + println!("{} {} does not require an API key", "β„Ή".blue(), provider); + return Ok(()); + } + + let storage_method = manager.config().llm.api_key_storage.to_string(); + + match storage_method.as_str() { + "keyring" => { + if !manager.keyring().is_available() { + bail!("Keyring is not available. 
Set QUICOMMIT_API_KEY environment variable instead or change api_key_storage to 'config'."); + } + + manager.set_api_key(key)?; + println!("{} API key stored securely in system keyring for {}", "βœ“".green(), provider.cyan()); + }, + "config" => { + // Store API key directly in config file + manager.config_mut().llm.api_key = Some(key.to_string()); + manager.save()?; + println!("{} API key stored in configuration file for {}", "βœ“".green(), provider.cyan()); + println!("{} Note: API key is stored in plain text. Consider using 'keyring' storage for better security.", "⚠".yellow()); + }, + "environment" => { + bail!("API key storage set to 'environment'. Please set QUICOMMIT_{}_API_KEY environment variable.", provider.to_uppercase()); + }, + _ => { + bail!("Invalid API key storage method: {}", storage_method); + } + } + Ok(()) } - async fn set_anthropic_key(&self, key: &str, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - manager.set_anthropic_api_key(key.to_string()); - manager.save()?; - println!("{} Anthropic API key set", "βœ“".green()); - Ok(()) - } - - async fn set_kimi_key(&self, key: &str, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - manager.set_kimi_api_key(key.to_string()); - manager.save()?; - println!("{} Kimi API key set", "βœ“".green()); - Ok(()) - } - - async fn set_deepseek_key(&self, key: &str, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - manager.set_deepseek_api_key(key.to_string()); - manager.save()?; - println!("{} DeepSeek API key set", "βœ“".green()); - Ok(()) - } - - async fn set_openrouter_key(&self, key: &str, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - manager.set_openrouter_api_key(key.to_string()); - manager.save()?; - println!("{} OpenRouter API key set", "βœ“".green()); - Ok(()) - } - - async fn set_kimi(&self, base_url: Option<&str>, model: 
Option<&str>, config_path: &Option) -> Result<()> { + async fn delete_api_key(&self, config_path: &Option) -> Result<()> { let mut manager = self.get_manager(config_path)?; - if let Some(url) = base_url { - manager.set_kimi_base_url(url.to_string()); - } - if let Some(m) = model { - manager.config_mut().llm.kimi.model = m.to_string(); + let provider = manager.llm_provider().to_string(); + let storage_method = manager.config().llm.api_key_storage.to_string(); + + match storage_method.as_str() { + "keyring" => { + if !manager.keyring().is_available() { + bail!("Keyring is not available."); + } + + manager.delete_api_key()?; + println!("{} API key deleted from system keyring for {}", "βœ“".green(), provider.cyan()); + }, + "config" => { + // Remove API key from config file + manager.config_mut().llm.api_key = None; + manager.save()?; + println!("{} API key deleted from configuration file for {}", "βœ“".green(), provider.cyan()); + }, + "environment" => { + println!("{} API key storage set to 'environment'. 
Please remove QUICOMMIT_{}_API_KEY environment variable manually.", "β„Ή".blue(), provider.to_uppercase()); + }, + _ => { + bail!("Invalid API key storage method: {}", storage_method); + } } - manager.save()?; - println!("{} Kimi configuration updated", "βœ“".green()); - Ok(()) - } - - async fn set_deepseek(&self, base_url: Option<&str>, model: Option<&str>, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - - if let Some(url) = base_url { - manager.set_deepseek_base_url(url.to_string()); - } - if let Some(m) = model { - manager.config_mut().llm.deepseek.model = m.to_string(); - } - - manager.save()?; - println!("{} DeepSeek configuration updated", "βœ“".green()); - Ok(()) - } - - async fn set_openrouter(&self, base_url: Option<&str>, model: Option<&str>, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - - if let Some(url) = base_url { - manager.set_openrouter_base_url(url.to_string()); - } - if let Some(m) = model { - manager.config_mut().llm.openrouter.model = m.to_string(); - } - - manager.save()?; - println!("{} OpenRouter configuration updated", "βœ“".green()); - Ok(()) - } - - async fn set_ollama(&self, url: Option<&str>, model: Option<&str>, config_path: &Option) -> Result<()> { - let mut manager = self.get_manager(config_path)?; - - if let Some(u) = url { - manager.config_mut().llm.ollama.url = u.to_string(); - } - if let Some(m) = model { - manager.config_mut().llm.ollama.model = m.to_string(); - } - - manager.save()?; - println!("{} Ollama configuration updated", "βœ“".green()); Ok(()) } @@ -731,7 +642,8 @@ impl ConfigCommand { manager.set_commit_format(format); manager.save()?; - println!("{} Set commit format to {}", "βœ“".green(), format.to_string().cyan()); + + println!("{} Commit format set to {}", "βœ“".green(), format.to_string().cyan()); Ok(()) } @@ -739,7 +651,8 @@ impl ConfigCommand { let mut manager = self.get_manager(config_path)?; 
manager.set_version_prefix(prefix.to_string()); manager.save()?; - println!("{} Set version prefix to '{}'", "βœ“".green(), prefix); + + println!("{} Version prefix set to '{}'", "βœ“".green(), prefix.cyan()); Ok(()) } @@ -747,37 +660,41 @@ impl ConfigCommand { let mut manager = self.get_manager(config_path)?; manager.set_changelog_path(path.to_string()); manager.save()?; - println!("{} Set changelog path to {}", "βœ“".green(), path); + + println!("{} Changelog path set to {}", "βœ“".green(), path.cyan()); Ok(()) } async fn set_language(&self, language: Option<&str>, config_path: &Option) -> Result<()> { let mut manager = self.get_manager(config_path)?; - let language_code = if let Some(lang) = language { - lang.to_string() + let lang_code = if let Some(l) = language { + l.to_string() } else { + println!("{}", "Select Output Language:".bold()); let languages = vec![ - Language::English, - Language::Chinese, - Language::Japanese, - Language::Korean, - Language::Spanish, - Language::French, - Language::German, + ("en", "English"), + ("zh", "δΈ­ζ–‡"), + ("ja", "ζ—₯本θͺž"), + ("ko", "ν•œκ΅­μ–΄"), + ("es", "EspaΓ±ol"), + ("fr", "FranΓ§ais"), + ("de", "Deutsch"), ]; - let language_names: Vec = languages.iter().map(|l| l.display_name().to_string()).collect(); + + let lang_names: Vec<&str> = languages.iter().map(|(_, n)| *n).collect(); let idx = Select::new() - .with_prompt("Select language") - .items(&language_names) + .items(&lang_names) .default(0) .interact()?; - languages[idx].to_code().to_string() + + languages[idx].0.to_string() }; - manager.set_output_language(language_code.clone()); + manager.set_output_language(lang_code.clone()); manager.save()?; - println!("{} Set output language to {}", "βœ“".green(), language_code.cyan()); + + println!("{} Output language set to {}", "βœ“".green(), lang_code.cyan()); Ok(()) } @@ -785,8 +702,8 @@ impl ConfigCommand { let mut manager = self.get_manager(config_path)?; manager.set_keep_types_english(keep); manager.save()?; - let 
status = if keep { "enabled" } else { "disabled" }; - println!("{} Keep commit types in English: {}", "βœ“".green(), status); + + println!("{} Keep commit types in English: {}", "βœ“".green(), keep.to_string().cyan()); Ok(()) } @@ -794,20 +711,20 @@ impl ConfigCommand { let mut manager = self.get_manager(config_path)?; manager.set_keep_changelog_types_english(keep); manager.save()?; - let status = if keep { "enabled" } else { "disabled" }; - println!("{} Keep changelog types in English: {}", "βœ“".green(), status); + + println!("{} Keep changelog types in English: {}", "βœ“".green(), keep.to_string().cyan()); Ok(()) } async fn reset(&self, force: bool, config_path: &Option) -> Result<()> { if !force { let confirm = Confirm::new() - .with_prompt("Are you sure you want to reset all configuration?") + .with_prompt("Reset all configuration to defaults?") .default(false) .interact()?; if !confirm { - println!("{}", "Cancelled.".yellow()); + println!("Reset cancelled."); return Ok(()); } } @@ -824,21 +741,24 @@ impl ConfigCommand { let manager = self.get_manager(config_path)?; let toml = manager.export()?; - if let Some(path) = output { - std::fs::write(path, toml)?; - println!("{} Configuration exported to {}", "βœ“".green(), path); - } else { - println!("{}", toml); + match output { + Some(path) => { + std::fs::write(path, &toml)?; + println!("{} Configuration exported to {}", "βœ“".green(), path); + } + None => { + println!("{}", toml); + } } Ok(()) } async fn import_config(&self, file: &str, config_path: &Option) -> Result<()> { - let toml = std::fs::read_to_string(file)?; + let content = std::fs::read_to_string(file)?; let mut manager = self.get_manager(config_path)?; - manager.import(&toml)?; + manager.import(&content)?; manager.save()?; println!("{} Configuration imported from {}", "βœ“".green(), file); @@ -847,196 +767,84 @@ impl ConfigCommand { async fn list_models(&self, config_path: &Option) -> Result<()> { let manager = self.get_manager(config_path)?; - let 
config = manager.config(); - - match config.llm.provider.as_str() { + let provider = manager.llm_provider(); + + println!("{}", "\nAvailable Models".bold()); + println!("{}", "─".repeat(40)); + + match provider { "ollama" => { - let client = crate::llm::OllamaClient::new( - &config.llm.ollama.url, - &config.llm.ollama.model, - ); - - println!("Fetching available models from Ollama..."); - match client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.ollama.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } + println!("Ollama models (local):"); + println!(" llama2, llama2-uncensored, llama2:13b"); + println!(" codellama, codellama:34b"); + println!(" mistral, mixtral"); + println!(" phi, gemma"); + println!("\nRun 'ollama list' to see installed models"); } "openai" => { - if let Some(ref key) = config.llm.openai.api_key { - let client = crate::llm::OpenAiClient::new( - &config.llm.openai.base_url, - key, - &config.llm.openai.model, - )?; - - println!("Fetching available models from OpenAI..."); - match client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.openai.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } - } else { - bail!("OpenAI API key not configured"); - } + println!("OpenAI models:"); + println!(" gpt-4, gpt-4-turbo, gpt-4o"); + println!(" gpt-3.5-turbo, gpt-3.5-turbo-16k"); } "anthropic" => { - if let Some(ref key) = config.llm.anthropic.api_key { - let client = crate::llm::AnthropicClient::new( - key, - &config.llm.anthropic.model, - )?; - - println!("Fetching available models from Anthropic..."); - match 
client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.anthropic.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } - } else { - bail!("Anthropic API key not configured"); - } + println!("Anthropic Claude models:"); + println!(" claude-3-opus-20240229"); + println!(" claude-3-sonnet-20240229"); + println!(" claude-3-haiku-20240307"); } "kimi" => { - if let Some(ref key) = config.llm.kimi.api_key { - let client = crate::llm::KimiClient::with_base_url( - key, - &config.llm.kimi.model, - &config.llm.kimi.base_url, - )?; - - println!("Fetching available models from Kimi..."); - match client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.kimi.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } - } else { - bail!("Kimi API key not configured"); - } + println!("Kimi (Moonshot AI) models:"); + println!(" moonshot-v1-8k"); + println!(" moonshot-v1-32k"); + println!(" moonshot-v1-128k"); } "deepseek" => { - if let Some(ref key) = config.llm.deepseek.api_key { - let client = crate::llm::DeepSeekClient::with_base_url( - key, - &config.llm.deepseek.model, - &config.llm.deepseek.base_url, - )?; - - println!("Fetching available models from DeepSeek..."); - match client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.deepseek.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } - } else { - bail!("DeepSeek API key not 
configured"); - } + println!("DeepSeek models:"); + println!(" deepseek-chat"); + println!(" deepseek-coder"); } "openrouter" => { - if let Some(ref key) = config.llm.openrouter.api_key { - let client = crate::llm::OpenRouterClient::with_base_url( - key, - &config.llm.openrouter.model, - &config.llm.openrouter.base_url, - )?; - - println!("Fetching available models from OpenRouter..."); - match client.list_models().await { - Ok(models) => { - println!("\n{}", "Available models:".bold()); - for model in models { - let marker = if model == config.llm.openrouter.model { "●".green() } else { "β—‹".dimmed() }; - println!("{} {}", marker, model); - } - } - Err(e) => { - println!("{} Failed to fetch models: {}", "βœ—".red(), e); - } - } - } else { - bail!("OpenRouter API key not configured"); - } + println!("OpenRouter models (examples):"); + println!(" openai/gpt-4, openai/gpt-3.5-turbo"); + println!(" anthropic/claude-3-opus"); + println!(" google/gemini-pro"); + println!(" meta-llama/llama-2-70b-chat"); + println!("\nSee https://openrouter.ai/models for full list"); } - provider => { - println!("Listing models not supported for provider: {}", provider); + _ => { + println!("Unknown provider: {}", provider); } } - + + println!("\nCurrent model: {}", manager.llm_model().cyan()); + Ok(()) } async fn test_llm(&self, config_path: &Option) -> Result<()> { let manager = self.get_manager(config_path)?; - let config = manager.config(); - println!("Testing LLM connection ({})...", config.llm.provider.cyan()); + println!("{}", "\nTesting LLM Connection...".bold()); + println!(" Provider: {}", manager.llm_provider().cyan()); + println!(" Model: {}", manager.llm_model().cyan()); + println!(" Base URL: {}", manager.llm_base_url()); - match crate::llm::LlmClient::from_config(&config.llm).await { - Ok(client) => { - if client.is_available().await { - println!("{} LLM connection successful!", "βœ“".green()); - - // Test generation with a sample diff - println!("Testing generation..."); 
- let sample_diff = r#"diff --git a/src/main.rs b/src/main.rs ---- a/src/main.rs -+++ b/src/main.rs -@@ -1,3 +1,4 @@ - fn main() { -+ println!("Hello, World!"); - }"#; - match client.generate_commit_message(sample_diff, crate::config::CommitFormat::Conventional, crate::config::Language::English).await { - Ok(response) => { - println!("{} Generation test passed", "βœ“".green()); - println!("Response: {}", response.description.dimmed()); - } - Err(e) => { - println!("{} Generation test failed: {}", "βœ—".red(), e); - } - } - } else { - println!("{} LLM provider is not available", "βœ—".red()); - } + let has_key = manager.has_api_key(); + if provider_needs_api_key(manager.llm_provider()) { + println!(" API Key: {}", if has_key { "βœ“ configured".green() } else { "βœ— not set".red() }); + } + + println!("\n{}", "Sending test request...".dimmed()); + + match crate::llm::test_connection(&manager).await { + Ok(response) => { + println!("{} Connection successful!", "βœ“".green()); + println!("\n{}", "Response:".bold()); + println!(" {}", response); } Err(e) => { - println!("{} Failed to initialize LLM: {}", "βœ—".red(), e); + println!("{} Connection failed: {}", "βœ—".red(), e); + return Err(e); } } diff --git a/src/commands/init.rs b/src/commands/init.rs index e2ff7cd..280e045 100644 --- a/src/commands/init.rs +++ b/src/commands/init.rs @@ -8,6 +8,7 @@ use crate::config::{GitProfile, Language}; use crate::config::manager::ConfigManager; use crate::config::profile::{GpgConfig, SshConfig}; use crate::i18n::Messages; +use crate::utils::keyring::{get_supported_providers, get_default_model, provider_needs_api_key}; use crate::utils::validators::validate_email; /// Initialize quicommit configuration @@ -31,7 +32,6 @@ impl InitCommand { crate::config::AppConfig::default_path().unwrap() }); - // Check if config already exists if config_path.exists() && !self.reset { if !self.yes { let overwrite = Confirm::new() @@ -49,13 +49,11 @@ impl InitCommand { } } - // Create parent 
directory if needed if let Some(parent) = config_path.parent() { std::fs::create_dir_all(parent) .map_err(|e| anyhow::anyhow!("Failed to create config directory: {}", e))?; } - // Create new config manager with fresh config let mut manager = ConfigManager::with_path_fresh(&config_path)?; if self.yes { @@ -66,7 +64,6 @@ impl InitCommand { manager.save()?; - // Get configured language for final messages let language = manager.get_language().unwrap_or(Language::English); let messages = Messages::new(language); @@ -81,7 +78,6 @@ impl InitCommand { } async fn quick_setup(&self, manager: &mut ConfigManager) -> Result<()> { - // Try to get git user info let git_config = git2::Config::open_default()?; let user_name = git_config.get_string("user.name").unwrap_or_else(|_| "User".to_string()); @@ -96,7 +92,6 @@ impl InitCommand { manager.add_profile("default".to_string(), profile)?; manager.set_default_profile(Some("default".to_string()))?; - // Set default LLM to Ollama manager.set_llm_provider("ollama".to_string()); Ok(()) @@ -106,7 +101,6 @@ impl InitCommand { let messages = Messages::new(Language::English); println!("\n{}", messages.setup_profile().bold()); - // Language selection println!("\n{}", messages.select_output_language().bold()); let languages = vec![ Language::English, @@ -126,16 +120,13 @@ impl InitCommand { let selected_language = languages[language_idx]; manager.set_output_language(selected_language.to_code().to_string()); - // Update messages to selected language let messages = Messages::new(selected_language); - // Profile name let profile_name: String = Input::new() .with_prompt(messages.profile_name()) .default("personal".to_string()) .interact_text()?; - // User info let git_config = git2::Config::open_default().ok(); let default_name = git_config.as_ref() @@ -177,7 +168,6 @@ impl InitCommand { None }; - // SSH configuration let setup_ssh = Confirm::new() .with_prompt(messages.configure_ssh()) .default(false) @@ -189,7 +179,6 @@ impl InitCommand { None 
}; - // GPG configuration let setup_gpg = Confirm::new() .with_prompt(messages.configure_gpg()) .default(false) @@ -201,7 +190,6 @@ impl InitCommand { None }; - // Create profile let mut profile = GitProfile::new( profile_name.clone(), user_name, @@ -220,9 +208,9 @@ impl InitCommand { manager.add_profile(profile_name.clone(), profile)?; manager.set_default_profile(Some(profile_name))?; - // LLM provider selection println!("\n{}", messages.select_llm_provider().bold()); - let providers = vec![ + + let provider_display_names = vec![ "Ollama (local)", "OpenAI", "Anthropic Claude", @@ -230,49 +218,90 @@ impl InitCommand { "DeepSeek", "OpenRouter" ]; + let provider_idx = Select::new() - .items(&providers) + .items(&provider_display_names) .default(0) .interact()?; - let provider = match provider_idx { - 0 => "ollama", - 1 => "openai", - 2 => "anthropic", - 3 => "kimi", - 4 => "deepseek", - 5 => "openrouter", - _ => "ollama", + let providers = get_supported_providers(); + let provider = providers[provider_idx].to_string(); + + let keyring = manager.keyring(); + let keyring_available = keyring.is_available(); + + if !keyring_available { + println!("\n{}", "⚠ Keyring is not available on this system.".yellow()); + println!("{}", keyring.get_status_message().yellow()); + } + + let api_key = if provider_needs_api_key(&provider) { + let env_key = std::env::var("QUICOMMIT_API_KEY") + .or_else(|_| std::env::var(format!("QUICOMMIT_{}_API_KEY", provider.to_uppercase()))) + .ok(); + + if let Some(key) = env_key { + println!("\n{} {}", "βœ“".green(), "Found API key in environment variable.".green()); + None + } else if keyring_available { + let prompt = match provider.as_str() { + "openai" => messages.openai_api_key(), + "anthropic" => messages.anthropic_api_key(), + "kimi" => messages.kimi_api_key(), + "deepseek" => messages.deepseek_api_key(), + "openrouter" => messages.openrouter_api_key(), + _ => "API Key", + }; + + let key: String = Input::new() + .with_prompt(prompt) + 
.interact_text()?; + Some(key) + } else { + println!("\n{}", "Please set the QUICOMMIT_API_KEY environment variable.".yellow()); + None + } + } else { + None }; - manager.set_llm_provider(provider.to_string()); + let default_model = get_default_model(&provider); + let model: String = Input::new() + .with_prompt("Model name") + .default(default_model.to_string()) + .interact_text()?; - // Configure API key if needed - if provider == "openai" { - let api_key: String = Input::new() - .with_prompt(messages.openai_api_key()) + let base_url: Option = if provider == "ollama" { + let url: String = Input::new() + .with_prompt("Ollama server URL") + .default("http://localhost:11434".to_string()) .interact_text()?; - manager.set_openai_api_key(api_key); - } else if provider == "anthropic" { - let api_key: String = Input::new() - .with_prompt(messages.anthropic_api_key()) - .interact_text()?; - manager.set_anthropic_api_key(api_key); - } else if provider == "kimi" { - let api_key: String = Input::new() - .with_prompt(messages.kimi_api_key()) - .interact_text()?; - manager.set_kimi_api_key(api_key); - } else if provider == "deepseek" { - let api_key: String = Input::new() - .with_prompt(messages.deepseek_api_key()) - .interact_text()?; - manager.set_deepseek_api_key(api_key); - } else if provider == "openrouter" { - let api_key: String = Input::new() - .with_prompt(messages.openrouter_api_key()) - .interact_text()?; - manager.set_openrouter_api_key(api_key); + Some(url) + } else { + let use_custom_url = Confirm::new() + .with_prompt("Use custom API base URL?") + .default(false) + .interact()?; + + if use_custom_url { + let url: String = Input::new() + .with_prompt("Base URL") + .interact_text()?; + Some(url) + } else { + None + } + }; + + manager.set_llm_provider(provider.clone()); + manager.set_llm_model(model); + manager.set_llm_base_url(base_url); + + if let Some(key) = api_key { + if provider_needs_api_key(&provider) { + manager.set_api_key(&key)?; + println!("\n{} {}", 
"βœ“".green(), "API key stored securely in system keyring.".green()); + } } Ok(()) diff --git a/src/commands/tag.rs b/src/commands/tag.rs index cb16f53..e77da25 100644 --- a/src/commands/tag.rs +++ b/src/commands/tag.rs @@ -270,10 +270,8 @@ impl TagCommand { async fn generate_tag_message(&self, repo: &GitRepo, version: &str, messages: &Messages) -> Result { let manager = ConfigManager::new()?; - let config = manager.config(); let language = manager.get_language().unwrap_or(Language::English); - // Get commits since last tag let tags = repo.get_tags()?; let commits = if let Some(latest_tag) = tags.first() { repo.get_commits_between(&latest_tag.name, "HEAD")? @@ -287,7 +285,7 @@ impl TagCommand { println!("{}", messages.ai_generating_tag(commits.len())); - let generator = ContentGenerator::new(&config.llm).await?; + let generator = ContentGenerator::new(&manager).await?; generator.generate_tag_message(version, &commits, language).await } diff --git a/src/config/manager.rs b/src/config/manager.rs index 57863f3..d3d77f2 100644 --- a/src/config/manager.rs +++ b/src/config/manager.rs @@ -1,4 +1,5 @@ use super::{AppConfig, GitProfile, TokenConfig}; +use crate::utils::keyring::{KeyringManager, get_default_base_url, get_default_model, provider_needs_api_key}; use anyhow::{bail, Context, Result}; use std::collections::HashMap; use std::path::{Path, PathBuf}; @@ -8,6 +9,7 @@ pub struct ConfigManager { config: AppConfig, config_path: PathBuf, modified: bool, + keyring: KeyringManager, } impl ConfigManager { @@ -28,6 +30,7 @@ impl ConfigManager { config, config_path: path.to_path_buf(), modified: false, + keyring: KeyringManager::new(), }) } @@ -37,6 +40,7 @@ impl ConfigManager { config: AppConfig::default(), config_path: path.to_path_buf(), modified: true, + keyring: KeyringManager::new(), }) } @@ -262,96 +266,140 @@ impl ConfigManager { /// Set LLM provider pub fn set_llm_provider(&mut self, provider: String) { - self.config.llm.provider = provider; + let default_model = 
get_default_model(&provider); + self.config.llm.provider = provider.clone(); + if self.config.llm.model.is_empty() || self.config.llm.model == "llama2" { + self.config.llm.model = default_model.to_string(); + } self.modified = true; } - /// Get OpenAI API key - pub fn openai_api_key(&self) -> Option<&String> { - self.config.llm.openai.api_key.as_ref() + /// Get model + pub fn llm_model(&self) -> &str { + &self.config.llm.model } - /// Set OpenAI API key - pub fn set_openai_api_key(&mut self, key: String) { - self.config.llm.openai.api_key = Some(key); + /// Set model + pub fn set_llm_model(&mut self, model: String) { + self.config.llm.model = model; self.modified = true; } - /// Get Anthropic API key - pub fn anthropic_api_key(&self) -> Option<&String> { - self.config.llm.anthropic.api_key.as_ref() + /// Get base URL (returns provider default if not set) + pub fn llm_base_url(&self) -> String { + match &self.config.llm.base_url { + Some(url) => url.clone(), + None => get_default_base_url(&self.config.llm.provider).to_string(), + } } - /// Set Anthropic API key - pub fn set_anthropic_api_key(&mut self, key: String) { - self.config.llm.anthropic.api_key = Some(key); + /// Set base URL + pub fn set_llm_base_url(&mut self, url: Option) { + self.config.llm.base_url = url; self.modified = true; } - /// Get Kimi API key - pub fn kimi_api_key(&self) -> Option<&String> { - self.config.llm.kimi.api_key.as_ref() + /// Get API key from configured storage method + pub fn get_api_key(&self) -> Option { + // First try environment variables (always checked) + if let Some(key) = self.keyring.get_api_key(&self.config.llm.provider).unwrap_or(None) { + return Some(key); + } + + // Then try config file if configured + if self.config.llm.api_key_storage == "config" { + return self.config.llm.api_key.clone(); + } + + None } - /// Set Kimi API key - pub fn set_kimi_api_key(&mut self, key: String) { - self.config.llm.kimi.api_key = Some(key); - self.modified = true; + /// Store API key in 
configured storage method + pub fn set_api_key(&self, api_key: &str) -> Result<()> { + match self.config.llm.api_key_storage.as_str() { + "keyring" => { + if !self.keyring.is_available() { + bail!("Keyring is not available. Set QUICOMMIT_API_KEY environment variable instead or change api_key_storage to 'config'."); + } + self.keyring.store_api_key(&self.config.llm.provider, api_key) + }, + "config" => { + // We can't modify self.config here since self is immutable + // This will be handled by the caller updating the config + Ok(()) + }, + "environment" => { + bail!("API key storage set to 'environment'. Please set QUICOMMIT_{}_API_KEY environment variable.", self.config.llm.provider.to_uppercase()); + }, + _ => { + bail!("Invalid API key storage method: {}", self.config.llm.api_key_storage); + } + } } - /// Get Kimi base URL - pub fn kimi_base_url(&self) -> &str { - &self.config.llm.kimi.base_url + /// Delete API key from configured storage method + pub fn delete_api_key(&self) -> Result<()> { + match self.config.llm.api_key_storage.as_str() { + "keyring" => { + if self.keyring.is_available() { + self.keyring.delete_api_key(&self.config.llm.provider)?; + } + }, + "config" => { + // We can't modify self.config here since self is immutable + // This will be handled by the caller updating the config + }, + "environment" => { + // Environment variables are not managed by the app + }, + _ => { + bail!("Invalid API key storage method: {}", self.config.llm.api_key_storage); + } + } + Ok(()) } - /// Set Kimi base URL - pub fn set_kimi_base_url(&mut self, url: String) { - self.config.llm.kimi.base_url = url; - self.modified = true; + /// Check if API key is configured + pub fn has_api_key(&self) -> bool { + if !provider_needs_api_key(&self.config.llm.provider) { + return true; + } + + // Check environment variables + if self.keyring.get_api_key(&self.config.llm.provider).unwrap_or(None).is_some() { + return true; + } + + // Check config file if configured + if 
self.config.llm.api_key_storage == "config" { + return self.config.llm.api_key.is_some(); + } + + false } - /// Get DeepSeek API key - pub fn deepseek_api_key(&self) -> Option<&String> { - self.config.llm.deepseek.api_key.as_ref() + /// Get keyring manager reference + pub fn keyring(&self) -> &KeyringManager { + &self.keyring } - /// Set DeepSeek API key - pub fn set_deepseek_api_key(&mut self, key: String) { - self.config.llm.deepseek.api_key = Some(key); - self.modified = true; - } - - /// Get DeepSeek base URL - pub fn deepseek_base_url(&self) -> &str { - &self.config.llm.deepseek.base_url - } - - /// Set DeepSeek base URL - pub fn set_deepseek_base_url(&mut self, url: String) { - self.config.llm.deepseek.base_url = url; - self.modified = true; - } - - /// Get OpenRouter API key - pub fn openrouter_api_key(&self) -> Option<&String> { - self.config.llm.openrouter.api_key.as_ref() - } - - /// Set OpenRouter API key - pub fn set_openrouter_api_key(&mut self, key: String) { - self.config.llm.openrouter.api_key = Some(key); - self.modified = true; - } - - /// Get OpenRouter base URL - pub fn openrouter_base_url(&self) -> &str { - &self.config.llm.openrouter.base_url - } - - /// Set OpenRouter base URL - pub fn set_openrouter_base_url(&mut self, url: String) { - self.config.llm.openrouter.base_url = url; - self.modified = true; + /// Configure LLM provider with all settings + pub fn configure_llm(&mut self, provider: String, model: Option, base_url: Option, api_key: Option<&str>) -> Result<()> { + self.set_llm_provider(provider.clone()); + + if let Some(m) = model { + self.set_llm_model(m); + } + + self.set_llm_base_url(base_url); + + if let Some(key) = api_key { + if provider_needs_api_key(&provider) { + self.set_api_key(key)?; + } + } + + Ok(()) } // Commit configuration @@ -471,6 +519,7 @@ impl Default for ConfigManager { config: AppConfig::default(), config_path: PathBuf::new(), modified: false, + keyring: KeyringManager::new(), } } } diff --git 
a/src/config/mod.rs b/src/config/mod.rs index 2be6cc0..b33e5c3 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -80,37 +80,16 @@ impl Default for AppConfig { /// LLM configuration #[derive(Debug, Clone, Serialize, Deserialize)] pub struct LlmConfig { - /// Default LLM provider + /// Current LLM provider (ollama, openai, anthropic, kimi, deepseek, openrouter) #[serde(default = "default_llm_provider")] pub provider: String, - /// OpenAI configuration - #[serde(default)] - pub openai: OpenAiConfig, + /// Model to use (stored in config, not in keyring) + #[serde(default = "default_model")] + pub model: String, - /// Ollama configuration - #[serde(default)] - pub ollama: OllamaConfig, - - /// Anthropic Claude configuration - #[serde(default)] - pub anthropic: AnthropicConfig, - - /// Kimi (Moonshot AI) configuration - #[serde(default)] - pub kimi: KimiConfig, - - /// DeepSeek configuration - #[serde(default)] - pub deepseek: DeepSeekConfig, - - /// OpenRouter configuration - #[serde(default)] - pub openrouter: OpenRouterConfig, - - /// Custom API configuration - #[serde(default)] - pub custom: Option, + /// API base URL (optional, will use provider default if not set) + pub base_url: Option, /// Maximum tokens for generation #[serde(default = "default_max_tokens")] @@ -123,186 +102,35 @@ pub struct LlmConfig { /// Timeout in seconds #[serde(default = "default_timeout")] pub timeout: u64, + + /// API key storage method (keyring, config, environment) + #[serde(default = "default_api_key_storage")] + pub api_key_storage: String, + + /// API key (stored in config for fallback, encrypted if encrypt_sensitive is true) + #[serde(default)] + pub api_key: Option, +} + +fn default_api_key_storage() -> String { + "keyring".to_string() } impl Default for LlmConfig { fn default() -> Self { Self { provider: default_llm_provider(), - openai: OpenAiConfig::default(), - ollama: OllamaConfig::default(), - anthropic: AnthropicConfig::default(), - kimi: KimiConfig::default(), - 
deepseek: DeepSeekConfig::default(), - openrouter: OpenRouterConfig::default(), - custom: None, + model: default_model(), + base_url: None, max_tokens: default_max_tokens(), temperature: default_temperature(), timeout: default_timeout(), - } - } -} - -/// OpenAI API configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OpenAiConfig { - /// API key - pub api_key: Option, - - /// Model to use - #[serde(default = "default_openai_model")] - pub model: String, - - /// API base URL (for custom endpoints) - #[serde(default = "default_openai_base_url")] - pub base_url: String, -} - -impl Default for OpenAiConfig { - fn default() -> Self { - Self { + api_key_storage: default_api_key_storage(), api_key: None, - model: default_openai_model(), - base_url: default_openai_base_url(), } } } -/// Ollama configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OllamaConfig { - /// Ollama server URL - #[serde(default = "default_ollama_url")] - pub url: String, - - /// Model to use - #[serde(default = "default_ollama_model")] - pub model: String, -} - -impl Default for OllamaConfig { - fn default() -> Self { - Self { - url: default_ollama_url(), - model: default_ollama_model(), - } - } -} - -/// Anthropic Claude configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AnthropicConfig { - /// API key - pub api_key: Option, - - /// Model to use - #[serde(default = "default_anthropic_model")] - pub model: String, -} - -impl Default for AnthropicConfig { - fn default() -> Self { - Self { - api_key: None, - model: default_anthropic_model(), - } - } -} - -/// Kimi (Moonshot AI) configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct KimiConfig { - /// API key - pub api_key: Option, - - /// Model to use - #[serde(default = "default_kimi_model")] - pub model: String, - - /// API base URL (for custom endpoints) - #[serde(default = "default_kimi_base_url")] - pub base_url: String, -} - -impl Default for KimiConfig { - 
fn default() -> Self { - Self { - api_key: None, - model: default_kimi_model(), - base_url: default_kimi_base_url(), - } - } -} - -/// DeepSeek configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct DeepSeekConfig { - /// API key - pub api_key: Option, - - /// Model to use - #[serde(default = "default_deepseek_model")] - pub model: String, - - /// API base URL (for custom endpoints) - #[serde(default = "default_deepseek_base_url")] - pub base_url: String, -} - -impl Default for DeepSeekConfig { - fn default() -> Self { - Self { - api_key: None, - model: default_deepseek_model(), - base_url: default_deepseek_base_url(), - } - } -} - -/// OpenRouter configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct OpenRouterConfig { - /// API key - pub api_key: Option, - - /// Model to use - #[serde(default = "default_openrouter_model")] - pub model: String, - - /// API base URL (for custom endpoints) - #[serde(default = "default_openrouter_base_url")] - pub base_url: String, -} - -impl Default for OpenRouterConfig { - fn default() -> Self { - Self { - api_key: None, - model: default_openrouter_model(), - base_url: default_openrouter_base_url(), - } - } -} - -/// Custom LLM API configuration -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CustomLlmConfig { - /// API endpoint URL - pub url: String, - - /// API key (optional) - pub api_key: Option, - - /// Model name - pub model: String, - - /// Request format template (JSON) - pub request_template: String, - - /// Response path to extract content (e.g., "choices.0.message.content") - pub response_path: String, -} - /// Commit configuration #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CommitConfig { @@ -592,6 +420,10 @@ fn default_llm_provider() -> String { "ollama".to_string() } +fn default_model() -> String { + "llama2".to_string() +} + fn default_max_tokens() -> u32 { 500 } @@ -604,50 +436,6 @@ fn default_timeout() -> u64 { 30 } -fn default_openai_model() -> 
String { - "gpt-4".to_string() -} - -fn default_openai_base_url() -> String { - "https://api.openai.com/v1".to_string() -} - -fn default_ollama_url() -> String { - "http://localhost:11434".to_string() -} - -fn default_ollama_model() -> String { - "llama2".to_string() -} - -fn default_anthropic_model() -> String { - "claude-3-sonnet-20240229".to_string() -} - -fn default_kimi_model() -> String { - "moonshot-v1-8k".to_string() -} - -fn default_kimi_base_url() -> String { - "https://api.moonshot.cn/v1".to_string() -} - -fn default_deepseek_model() -> String { - "deepseek-chat".to_string() -} - -fn default_deepseek_base_url() -> String { - "https://api.deepseek.com/v1".to_string() -} - -fn default_openrouter_model() -> String { - "openai/gpt-3.5-turbo".to_string() -} - -fn default_openrouter_base_url() -> String { - "https://openrouter.ai/api/v1".to_string() -} - fn default_commit_format() -> CommitFormat { CommitFormat::Conventional } diff --git a/src/generator/mod.rs b/src/generator/mod.rs index 443c859..f0ce494 100644 --- a/src/generator/mod.rs +++ b/src/generator/mod.rs @@ -1,4 +1,5 @@ -use crate::config::{CommitFormat, LlmConfig, Language}; +use crate::config::{CommitFormat, Language}; +use crate::config::manager::ConfigManager; use crate::git::{CommitInfo, GitRepo}; use crate::llm::{GeneratedCommit, LlmClient}; use anyhow::{Context, Result}; @@ -10,12 +11,11 @@ pub struct ContentGenerator { impl ContentGenerator { /// Create new content generator - pub async fn new(config: &LlmConfig) -> Result { - let llm_client = LlmClient::from_config(config).await?; + pub async fn new(manager: &ConfigManager) -> Result { + let llm_client = LlmClient::from_config(manager).await?; - // Check if provider is available if !llm_client.is_available().await { - anyhow::bail!("LLM provider '{}' is not available", config.provider); + anyhow::bail!("LLM provider '{}' is not available", manager.llm_provider()); } Ok(Self { llm_client }) diff --git a/src/llm/mod.rs b/src/llm/mod.rs index 
f5ae849..d3094b8 100644 --- a/src/llm/mod.rs +++ b/src/llm/mod.rs @@ -57,48 +57,50 @@ impl Default for LlmClientConfig { } impl LlmClient { - /// Create LLM client from configuration - pub async fn from_config(config: &crate::config::LlmConfig) -> Result { + /// Create LLM client from configuration manager + pub async fn from_config(manager: &crate::config::manager::ConfigManager) -> Result { + let config = manager.config(); let client_config = LlmClientConfig { - max_tokens: config.max_tokens, - temperature: config.temperature, - timeout: Duration::from_secs(config.timeout), + max_tokens: config.llm.max_tokens, + temperature: config.llm.temperature, + timeout: Duration::from_secs(config.llm.timeout), }; - let provider: Box = match config.provider.as_str() { + let provider = config.llm.provider.as_str(); + let model = config.llm.model.as_str(); + let base_url = manager.llm_base_url(); + let api_key = manager.get_api_key(); + + let provider: Box = match provider { "ollama" => { - Box::new(OllamaClient::new(&config.ollama.url, &config.ollama.model)) + Box::new(OllamaClient::new(&base_url, model)) } "openai" => { - let api_key = config.openai.api_key.as_ref() + let key = api_key.as_ref() .ok_or_else(|| anyhow::anyhow!("OpenAI API key not configured"))?; - Box::new(OpenAiClient::new( - &config.openai.base_url, - api_key, - &config.openai.model, - )?) + Box::new(OpenAiClient::new(&base_url, key, model)?) } "anthropic" => { - let api_key = config.anthropic.api_key.as_ref() + let key = api_key.as_ref() .ok_or_else(|| anyhow::anyhow!("Anthropic API key not configured"))?; - Box::new(AnthropicClient::new(api_key, &config.anthropic.model)?) + Box::new(AnthropicClient::new(key, model)?) } "kimi" => { - let api_key = config.kimi.api_key.as_ref() + let key = api_key.as_ref() .ok_or_else(|| anyhow::anyhow!("Kimi API key not configured"))?; - Box::new(KimiClient::with_base_url(api_key, &config.kimi.model, &config.kimi.base_url)?) 
+ Box::new(KimiClient::with_base_url(key, model, &base_url)?) } "deepseek" => { - let api_key = config.deepseek.api_key.as_ref() + let key = api_key.as_ref() .ok_or_else(|| anyhow::anyhow!("DeepSeek API key not configured"))?; - Box::new(DeepSeekClient::with_base_url(api_key, &config.deepseek.model, &config.deepseek.base_url)?) + Box::new(DeepSeekClient::with_base_url(key, model, &base_url)?) } "openrouter" => { - let api_key = config.openrouter.api_key.as_ref() + let key = api_key.as_ref() .ok_or_else(|| anyhow::anyhow!("OpenRouter API key not configured"))?; - Box::new(OpenRouterClient::with_base_url(api_key, &config.openrouter.model, &config.openrouter.base_url)?) + Box::new(OpenRouterClient::with_base_url(key, model, &base_url)?) } - _ => bail!("Unknown LLM provider: {}", config.provider), + _ => bail!("Unknown LLM provider: {}", provider), }; Ok(Self { @@ -1012,3 +1014,10 @@ Gruppieren Sie Commits nach: Formatieren Sie in Markdown mit geeigneten Überschriften und AufzΓ€hlungspunkten. 
"#; + +/// Test LLM connection +pub async fn test_connection(manager: &crate::config::manager::ConfigManager) -> Result { + let client = LlmClient::from_config(manager).await?; + let response = client.provider.generate("Say 'Hello, World!'").await?; + Ok(response) +} diff --git a/src/utils/keyring.rs b/src/utils/keyring.rs new file mode 100644 index 0000000..b17724a --- /dev/null +++ b/src/utils/keyring.rs @@ -0,0 +1,219 @@ +use anyhow::{bail, Context, Result}; +use std::env; + +const SERVICE_NAME: &str = "quicommit"; +const ENV_API_KEY: &str = "QUICOMMIT_API_KEY"; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum KeyringStatus { + Available, + Unavailable, +} + +pub struct KeyringManager { + status: KeyringStatus, +} + +impl KeyringManager { + pub fn new() -> Self { + let status = Self::check_keyring_availability(); + Self { status } + } + + pub fn check_keyring_availability() -> KeyringStatus { + #[cfg(target_os = "windows")] + { + KeyringStatus::Available + } + #[cfg(target_os = "macos")] + { + KeyringStatus::Available + } + #[cfg(target_os = "linux")] + { + Self::check_linux_keyring() + } + #[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))] + { + KeyringStatus::Unavailable + } + } + + #[cfg(target_os = "linux")] + fn check_linux_keyring() -> KeyringStatus { + use std::path::Path; + + let has_dbus = Path::new("/usr/bin/dbus-daemon").exists() + || Path::new("/bin/dbus-daemon").exists() + || env::var("DBUS_SESSION_BUS_ADDRESS").is_ok(); + + let has_keyring = Path::new("/usr/bin/gnome-keyring-daemon").exists() + || Path::new("/usr/bin/gnome-keyring").exists() + || Path::new("/usr/bin/kwalletd5").exists() + || Path::new("/usr/bin/kwalletd6").exists() + || env::var("SECRET_SERVICE").is_ok(); + + if has_dbus && has_keyring { + KeyringStatus::Available + } else { + KeyringStatus::Unavailable + } + } + + pub fn status(&self) -> KeyringStatus { + self.status + } + + pub fn is_available(&self) -> bool { + self.status == 
KeyringStatus::Available + } + + pub fn store_api_key(&self, provider: &str, api_key: &str) -> Result<()> { + if !self.is_available() { + bail!("Keyring is not available on this system"); + } + + let entry = keyring::Entry::new(SERVICE_NAME, provider) + .context("Failed to create keyring entry")?; + + entry.set_password(api_key) + .context("Failed to store API key")?; + + Ok(()) + } + + pub fn get_api_key(&self, provider: &str) -> Result> { + // Prefer the QUICOMMIT_API_KEY environment variable over the keyring + if let Ok(key) = env::var(ENV_API_KEY) { + if !key.is_empty() { + return Ok(Some(key)); + } + } + + // If the keyring is unavailable, return None directly + if !self.is_available() { + return Ok(None); + } + + // Fall back to the system keyring + let entry = keyring::Entry::new(SERVICE_NAME, provider) + .context("Failed to create keyring entry")?; + + match entry.get_password() { + Ok(key) => Ok(Some(key)), + Err(keyring::Error::NoEntry) => Ok(None), + Err(e) => Err(e.into()), + } + } + + pub fn delete_api_key(&self, provider: &str) -> Result<()> { + if !self.is_available() { + bail!("Keyring is not available on this system"); + } + + let entry = keyring::Entry::new(SERVICE_NAME, provider) + .context("Failed to create keyring entry")?; + + entry.delete_credential() + .context("Failed to delete API key")?; + + Ok(()) + } + + pub fn has_api_key(&self, provider: &str) -> bool { + self.get_api_key(provider).unwrap_or(None).is_some() + } + + pub fn get_status_message(&self) -> String { + match self.status { + KeyringStatus::Available => { + #[cfg(target_os = "windows")] + { + "Windows Credential Manager is available".to_string() + } + #[cfg(target_os = "macos")] + { + "macOS Keychain is available".to_string() + } + #[cfg(target_os = "linux")] + { + "Linux secret service is available".to_string() + } + #[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))] + { + "Keyring is available".to_string() + } + } + KeyringStatus::Unavailable => { + "Keyring is not available. 
Set QUICOMMIT_API_KEY environment variable.".to_string() + } + } + } +} + +impl Default for KeyringManager { + fn default() -> Self { + Self::new() + } +} + +pub fn get_default_base_url(provider: &str) -> &'static str { + match provider { + "openai" => "https://api.openai.com/v1", + "anthropic" => "https://api.anthropic.com/v1", + "kimi" => "https://api.moonshot.cn/v1", + "deepseek" => "https://api.deepseek.com/v1", + "openrouter" => "https://openrouter.ai/api/v1", + "ollama" => "http://localhost:11434", + _ => "", + } +} + +pub fn get_default_model(provider: &str) -> &'static str { + match provider { + "openai" => "gpt-4", + "anthropic" => "claude-3-sonnet-20240229", + "kimi" => "moonshot-v1-8k", + "deepseek" => "deepseek-chat", + "openrouter" => "openai/gpt-3.5-turbo", + "ollama" => "llama2", + _ => "", + } +} + +pub fn get_supported_providers() -> &'static [&'static str] { + &["ollama", "openai", "anthropic", "kimi", "deepseek", "openrouter"] +} + +pub fn provider_needs_api_key(provider: &str) -> bool { + provider != "ollama" +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_get_default_base_url() { + assert_eq!(get_default_base_url("openai"), "https://api.openai.com/v1"); + assert_eq!(get_default_base_url("anthropic"), "https://api.anthropic.com/v1"); + assert_eq!(get_default_base_url("kimi"), "https://api.moonshot.cn/v1"); + assert_eq!(get_default_base_url("deepseek"), "https://api.deepseek.com/v1"); + assert_eq!(get_default_base_url("openrouter"), "https://openrouter.ai/api/v1"); + assert_eq!(get_default_base_url("ollama"), "http://localhost:11434"); + } + + #[test] + fn test_get_default_model() { + assert_eq!(get_default_model("openai"), "gpt-4"); + assert_eq!(get_default_model("anthropic"), "claude-3-sonnet-20240229"); + assert_eq!(get_default_model("ollama"), "llama2"); + } + + #[test] + fn test_provider_needs_api_key() { + assert!(provider_needs_api_key("openai")); + assert!(provider_needs_api_key("anthropic")); + 
assert!(!provider_needs_api_key("ollama")); + } +} diff --git a/src/utils/mod.rs b/src/utils/mod.rs index 0e1cc9c..9fc3e03 100644 --- a/src/utils/mod.rs +++ b/src/utils/mod.rs @@ -1,6 +1,7 @@ pub mod crypto; pub mod editor; pub mod formatter; +pub mod keyring; pub mod validators; use anyhow::{Context, Result}; diff --git a/test-keyring/Cargo.toml b/test-keyring/Cargo.toml new file mode 100644 index 0000000..7e37c14 --- /dev/null +++ b/test-keyring/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "test-keyring" +version = "0.1.0" +edition = "2024" + +[dependencies] +keyring = "3" \ No newline at end of file diff --git a/test-keyring/src/main.rs b/test-keyring/src/main.rs new file mode 100644 index 0000000..e69d6c0 --- /dev/null +++ b/test-keyring/src/main.rs @@ -0,0 +1,18 @@ +use keyring::Entry; + +fn main() { + println!("Testing keyring functionality..."); + + // Test storing password + let entry = Entry::new("test-service", "test-user").unwrap(); + println!("Created entry successfully"); + + entry.set_password("test-password").unwrap(); + println!("Stored password successfully"); + + // Test retrieving password + let retrieved = entry.get_password().unwrap(); + println!("Retrieved password: {}", retrieved); + + println!("Keyring test completed successfully!"); +} \ No newline at end of file diff --git a/test_keyring.rs b/test_keyring.rs new file mode 100644 index 0000000..e69d6c0 --- /dev/null +++ b/test_keyring.rs @@ -0,0 +1,18 @@ +use keyring::Entry; + +fn main() { + println!("Testing keyring functionality..."); + + // Test storing password + let entry = Entry::new("test-service", "test-user").unwrap(); + println!("Created entry successfully"); + + entry.set_password("test-password").unwrap(); + println!("Stored password successfully"); + + // Test retrieving password + let retrieved = entry.get_password().unwrap(); + println!("Retrieved password: {}", retrieved); + + println!("Keyring test completed successfully!"); +} \ No newline at end of file diff --git 
a/tests/integration_tests.rs b/tests/integration_tests.rs index 8f29c2b..d3016c5 100644 --- a/tests/integration_tests.rs +++ b/tests/integration_tests.rs @@ -47,6 +47,24 @@ fn create_commit(dir: &PathBuf, message: &str) { .expect("Failed to create commit"); } +fn setup_git_repo(dir: &PathBuf) { + create_git_repo(dir); + configure_git_user(dir); +} + +fn setup_test_repo_with_file(dir: &PathBuf, file_name: &str, file_content: &str) { + setup_git_repo(dir); + create_test_file(dir, file_name, file_content); + stage_file(dir, file_name); +} + +fn init_quicommit(dir: &PathBuf, config_path: &PathBuf) { + let mut cmd = Command::cargo_bin("quicommit").unwrap(); + cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) + .current_dir(dir); + cmd.assert().success(); +} + mod cli_basic { use super::*; @@ -57,7 +75,10 @@ mod cli_basic { cmd.assert() .success() .stdout(predicate::str::contains("QuiCommit")) - .stdout(predicate::str::contains("AI-powered Git assistant")); + .stdout(predicate::str::contains("AI-powered Git assistant")) + .stdout(predicate::str::contains("Usage:")) + .stdout(predicate::str::contains("Commands:")) + .stdout(predicate::str::contains("Options:")); } #[test] @@ -252,15 +273,10 @@ mod commit_command { fn test_commit_no_changes() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); + setup_git_repo(&repo_path); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["commit", "--manual", "-m", "test: empty", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -275,18 +291,10 @@ mod commit_command { fn test_commit_with_staged_changes() { 
let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); - - create_test_file(&repo_path, "test.txt", "Hello, World!"); - stage_file(&repo_path, "test.txt"); + setup_test_repo_with_file(&repo_path, "test.txt", "Hello, World!"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["commit", "--manual", "-m", "test: add test file", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -301,18 +309,10 @@ mod commit_command { fn test_commit_date_mode() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); - - create_test_file(&repo_path, "daily.txt", "Daily update"); - stage_file(&repo_path, "daily.txt"); + setup_test_repo_with_file(&repo_path, "daily.txt", "Daily update"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["commit", "--date", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -349,19 +349,14 @@ mod tag_command { fn test_tag_list_empty() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); + setup_git_repo(&repo_path); create_test_file(&repo_path, "test.txt", "content"); stage_file(&repo_path, "test.txt"); create_commit(&repo_path, "feat: initial 
commit"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["tag", "--name", "v0.1.0", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -380,16 +375,12 @@ mod changelog_command { fn test_changelog_init() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); + setup_git_repo(&repo_path); let config_path = repo_path.join("config.toml"); let changelog_path = repo_path.join("CHANGELOG.md"); - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["changelog", "--init", "--output", changelog_path.to_str().unwrap(), "--config", config_path.to_str().unwrap()]) @@ -404,19 +395,14 @@ mod changelog_command { fn test_changelog_dry_run() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); + setup_git_repo(&repo_path); create_test_file(&repo_path, "test.txt", "content"); stage_file(&repo_path, "test.txt"); create_commit(&repo_path, "feat: add feature"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["changelog", "--dry-run", "--yes", 
"--config", config_path.to_str().unwrap()]) @@ -532,18 +518,10 @@ mod validators { fn test_commit_message_validation() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); - - create_test_file(&repo_path, "test.txt", "content"); - stage_file(&repo_path, "test.txt"); + setup_test_repo_with_file(&repo_path, "test.txt", "content"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["commit", "--manual", "-m", "invalid commit message without type", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -558,18 +536,10 @@ mod validators { fn test_valid_conventional_commit() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); - - create_test_file(&repo_path, "test.txt", "content"); - stage_file(&repo_path, "test.txt"); + setup_test_repo_with_file(&repo_path, "test.txt", "content"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["commit", "--manual", "-m", "feat: add new feature", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -588,18 +558,10 @@ mod subcommands { fn test_commit_alias() { let temp_dir = TempDir::new().unwrap(); let repo_path = temp_dir.path().to_path_buf(); - create_git_repo(&repo_path); - configure_git_user(&repo_path); - - 
create_test_file(&repo_path, "test.txt", "content"); - stage_file(&repo_path, "test.txt"); + setup_test_repo_with_file(&repo_path, "test.txt", "content"); let config_path = repo_path.join("config.toml"); - - let mut cmd = Command::cargo_bin("quicommit").unwrap(); - cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]) - .current_dir(&repo_path); - cmd.assert().success(); + init_quicommit(&repo_path, &config_path); let mut cmd = Command::cargo_bin("quicommit").unwrap(); cmd.args(&["c", "--manual", "-m", "fix: test", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) @@ -640,3 +602,59 @@ mod subcommands { .stdout(predicate::str::contains("default")); } } + +mod edge_cases { + use super::*; + + #[test] + fn test_config_file_not_found() { + let temp_dir = TempDir::new().unwrap(); + let non_existent_config = temp_dir.path().join("non_existent_config.toml"); + + let mut cmd = Command::cargo_bin("quicommit").unwrap(); + cmd.args(&["config", "show", "--config", non_existent_config.to_str().unwrap()]); + + cmd.assert() + .success() + .stdout(predicate::str::contains("QuiCommit Configuration")) + .stdout(predicate::str::contains("Default profile: (none)")) + .stdout(predicate::str::contains("Profiles: 0")); + } + + #[test] + fn test_invalid_git_repo() { + let temp_dir = TempDir::new().unwrap(); + let repo_path = temp_dir.path().to_path_buf(); + + let config_path = repo_path.join("config.toml"); + let mut cmd = Command::cargo_bin("quicommit").unwrap(); + cmd.args(&["init", "--yes", "--config", config_path.to_str().unwrap()]); + cmd.assert().success(); + + let mut cmd = Command::cargo_bin("quicommit").unwrap(); + cmd.args(&["commit", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) + .current_dir(&repo_path); + + cmd.assert() + .failure() + .stderr(predicate::str::contains("git").or(predicate::str::contains("repository"))); + } + + #[test] + fn test_empty_commit_message() { + let temp_dir = TempDir::new().unwrap(); + let 
repo_path = temp_dir.path().to_path_buf(); + setup_test_repo_with_file(&repo_path, "test.txt", "content"); + + let config_path = repo_path.join("config.toml"); + init_quicommit(&repo_path, &config_path); + + let mut cmd = Command::cargo_bin("quicommit").unwrap(); + cmd.args(&["commit", "--manual", "-m", "", "--dry-run", "--yes", "--config", config_path.to_str().unwrap()]) + .current_dir(&repo_path); + + cmd.assert() + .failure() + .stderr(predicate::str::contains("Invalid conventional commit format")); + } +}