feat(keyring): 集成系统密钥环安全存储 API key
This commit is contained in:
@@ -204,12 +204,11 @@ impl ChangelogCommand {
|
||||
messages: &Messages,
|
||||
) -> Result<String> {
|
||||
let manager = ConfigManager::new()?;
|
||||
let config = manager.config();
|
||||
let language = manager.get_language().unwrap_or(Language::English);
|
||||
|
||||
println!("{}", messages.ai_generating_changelog());
|
||||
|
||||
let generator = ContentGenerator::new(&config.llm).await?;
|
||||
let generator = ContentGenerator::new(&manager).await?;
|
||||
generator.generate_changelog_entry(version, commits, language).await
|
||||
}
|
||||
|
||||
|
||||
@@ -257,22 +257,17 @@ impl CommitCommand {
|
||||
|
||||
async fn generate_commit(&self, repo: &GitRepo, format: CommitFormat, messages: &Messages) -> Result<String> {
|
||||
let manager = ConfigManager::new()?;
|
||||
let config = manager.config();
|
||||
|
||||
// Check if LLM is configured
|
||||
let generator = ContentGenerator::new(&config.llm).await
|
||||
let generator = ContentGenerator::new(&manager).await
|
||||
.context("Failed to initialize LLM. Use --manual for manual commit.")?;
|
||||
|
||||
println!("{}", messages.ai_analyzing());
|
||||
|
||||
let language_str = &config.language.output_language;
|
||||
let language = Language::from_str(language_str).unwrap_or(Language::English);
|
||||
let language = manager.get_language().unwrap_or(Language::English);
|
||||
|
||||
let generated = if self.yes {
|
||||
// Non-interactive mode: generate directly
|
||||
generator.generate_commit_from_repo(repo, format, language).await?
|
||||
} else {
|
||||
// Interactive mode: allow user to review and regenerate
|
||||
generator.generate_commit_interactive(repo, format, language).await?
|
||||
};
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -8,6 +8,7 @@ use crate::config::{GitProfile, Language};
|
||||
use crate::config::manager::ConfigManager;
|
||||
use crate::config::profile::{GpgConfig, SshConfig};
|
||||
use crate::i18n::Messages;
|
||||
use crate::utils::keyring::{get_supported_providers, get_default_model, provider_needs_api_key};
|
||||
use crate::utils::validators::validate_email;
|
||||
|
||||
/// Initialize quicommit configuration
|
||||
@@ -31,7 +32,6 @@ impl InitCommand {
|
||||
crate::config::AppConfig::default_path().unwrap()
|
||||
});
|
||||
|
||||
// Check if config already exists
|
||||
if config_path.exists() && !self.reset {
|
||||
if !self.yes {
|
||||
let overwrite = Confirm::new()
|
||||
@@ -49,13 +49,11 @@ impl InitCommand {
|
||||
}
|
||||
}
|
||||
|
||||
// Create parent directory if needed
|
||||
if let Some(parent) = config_path.parent() {
|
||||
std::fs::create_dir_all(parent)
|
||||
.map_err(|e| anyhow::anyhow!("Failed to create config directory: {}", e))?;
|
||||
}
|
||||
|
||||
// Create new config manager with fresh config
|
||||
let mut manager = ConfigManager::with_path_fresh(&config_path)?;
|
||||
|
||||
if self.yes {
|
||||
@@ -66,7 +64,6 @@ impl InitCommand {
|
||||
|
||||
manager.save()?;
|
||||
|
||||
// Get configured language for final messages
|
||||
let language = manager.get_language().unwrap_or(Language::English);
|
||||
let messages = Messages::new(language);
|
||||
|
||||
@@ -81,7 +78,6 @@ impl InitCommand {
|
||||
}
|
||||
|
||||
async fn quick_setup(&self, manager: &mut ConfigManager) -> Result<()> {
|
||||
// Try to get git user info
|
||||
let git_config = git2::Config::open_default()?;
|
||||
|
||||
let user_name = git_config.get_string("user.name").unwrap_or_else(|_| "User".to_string());
|
||||
@@ -96,7 +92,6 @@ impl InitCommand {
|
||||
manager.add_profile("default".to_string(), profile)?;
|
||||
manager.set_default_profile(Some("default".to_string()))?;
|
||||
|
||||
// Set default LLM to Ollama
|
||||
manager.set_llm_provider("ollama".to_string());
|
||||
|
||||
Ok(())
|
||||
@@ -106,7 +101,6 @@ impl InitCommand {
|
||||
let messages = Messages::new(Language::English);
|
||||
println!("\n{}", messages.setup_profile().bold());
|
||||
|
||||
// Language selection
|
||||
println!("\n{}", messages.select_output_language().bold());
|
||||
let languages = vec![
|
||||
Language::English,
|
||||
@@ -126,16 +120,13 @@ impl InitCommand {
|
||||
let selected_language = languages[language_idx];
|
||||
manager.set_output_language(selected_language.to_code().to_string());
|
||||
|
||||
// Update messages to selected language
|
||||
let messages = Messages::new(selected_language);
|
||||
|
||||
// Profile name
|
||||
let profile_name: String = Input::new()
|
||||
.with_prompt(messages.profile_name())
|
||||
.default("personal".to_string())
|
||||
.interact_text()?;
|
||||
|
||||
// User info
|
||||
let git_config = git2::Config::open_default().ok();
|
||||
|
||||
let default_name = git_config.as_ref()
|
||||
@@ -177,7 +168,6 @@ impl InitCommand {
|
||||
None
|
||||
};
|
||||
|
||||
// SSH configuration
|
||||
let setup_ssh = Confirm::new()
|
||||
.with_prompt(messages.configure_ssh())
|
||||
.default(false)
|
||||
@@ -189,7 +179,6 @@ impl InitCommand {
|
||||
None
|
||||
};
|
||||
|
||||
// GPG configuration
|
||||
let setup_gpg = Confirm::new()
|
||||
.with_prompt(messages.configure_gpg())
|
||||
.default(false)
|
||||
@@ -201,7 +190,6 @@ impl InitCommand {
|
||||
None
|
||||
};
|
||||
|
||||
// Create profile
|
||||
let mut profile = GitProfile::new(
|
||||
profile_name.clone(),
|
||||
user_name,
|
||||
@@ -220,9 +208,9 @@ impl InitCommand {
|
||||
manager.add_profile(profile_name.clone(), profile)?;
|
||||
manager.set_default_profile(Some(profile_name))?;
|
||||
|
||||
// LLM provider selection
|
||||
println!("\n{}", messages.select_llm_provider().bold());
|
||||
let providers = vec![
|
||||
|
||||
let provider_display_names = vec![
|
||||
"Ollama (local)",
|
||||
"OpenAI",
|
||||
"Anthropic Claude",
|
||||
@@ -230,49 +218,90 @@ impl InitCommand {
|
||||
"DeepSeek",
|
||||
"OpenRouter"
|
||||
];
|
||||
|
||||
let provider_idx = Select::new()
|
||||
.items(&providers)
|
||||
.items(&provider_display_names)
|
||||
.default(0)
|
||||
.interact()?;
|
||||
|
||||
let provider = match provider_idx {
|
||||
0 => "ollama",
|
||||
1 => "openai",
|
||||
2 => "anthropic",
|
||||
3 => "kimi",
|
||||
4 => "deepseek",
|
||||
5 => "openrouter",
|
||||
_ => "ollama",
|
||||
let providers = get_supported_providers();
|
||||
let provider = providers[provider_idx].to_string();
|
||||
|
||||
let keyring = manager.keyring();
|
||||
let keyring_available = keyring.is_available();
|
||||
|
||||
if !keyring_available {
|
||||
println!("\n{}", "⚠ Keyring is not available on this system.".yellow());
|
||||
println!("{}", keyring.get_status_message().yellow());
|
||||
}
|
||||
|
||||
let api_key = if provider_needs_api_key(&provider) {
|
||||
let env_key = std::env::var("QUICOMMIT_API_KEY")
|
||||
.or_else(|_| std::env::var(format!("QUICOMMIT_{}_API_KEY", provider.to_uppercase())))
|
||||
.ok();
|
||||
|
||||
if let Some(key) = env_key {
|
||||
println!("\n{} {}", "✓".green(), "Found API key in environment variable.".green());
|
||||
None
|
||||
} else if keyring_available {
|
||||
let prompt = match provider.as_str() {
|
||||
"openai" => messages.openai_api_key(),
|
||||
"anthropic" => messages.anthropic_api_key(),
|
||||
"kimi" => messages.kimi_api_key(),
|
||||
"deepseek" => messages.deepseek_api_key(),
|
||||
"openrouter" => messages.openrouter_api_key(),
|
||||
_ => "API Key",
|
||||
};
|
||||
|
||||
let key: String = Input::new()
|
||||
.with_prompt(prompt)
|
||||
.interact_text()?;
|
||||
Some(key)
|
||||
} else {
|
||||
println!("\n{}", "Please set the QUICOMMIT_API_KEY environment variable.".yellow());
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
manager.set_llm_provider(provider.to_string());
|
||||
let default_model = get_default_model(&provider);
|
||||
let model: String = Input::new()
|
||||
.with_prompt("Model name")
|
||||
.default(default_model.to_string())
|
||||
.interact_text()?;
|
||||
|
||||
// Configure API key if needed
|
||||
if provider == "openai" {
|
||||
let api_key: String = Input::new()
|
||||
.with_prompt(messages.openai_api_key())
|
||||
let base_url: Option<String> = if provider == "ollama" {
|
||||
let url: String = Input::new()
|
||||
.with_prompt("Ollama server URL")
|
||||
.default("http://localhost:11434".to_string())
|
||||
.interact_text()?;
|
||||
manager.set_openai_api_key(api_key);
|
||||
} else if provider == "anthropic" {
|
||||
let api_key: String = Input::new()
|
||||
.with_prompt(messages.anthropic_api_key())
|
||||
.interact_text()?;
|
||||
manager.set_anthropic_api_key(api_key);
|
||||
} else if provider == "kimi" {
|
||||
let api_key: String = Input::new()
|
||||
.with_prompt(messages.kimi_api_key())
|
||||
.interact_text()?;
|
||||
manager.set_kimi_api_key(api_key);
|
||||
} else if provider == "deepseek" {
|
||||
let api_key: String = Input::new()
|
||||
.with_prompt(messages.deepseek_api_key())
|
||||
.interact_text()?;
|
||||
manager.set_deepseek_api_key(api_key);
|
||||
} else if provider == "openrouter" {
|
||||
let api_key: String = Input::new()
|
||||
.with_prompt(messages.openrouter_api_key())
|
||||
.interact_text()?;
|
||||
manager.set_openrouter_api_key(api_key);
|
||||
Some(url)
|
||||
} else {
|
||||
let use_custom_url = Confirm::new()
|
||||
.with_prompt("Use custom API base URL?")
|
||||
.default(false)
|
||||
.interact()?;
|
||||
|
||||
if use_custom_url {
|
||||
let url: String = Input::new()
|
||||
.with_prompt("Base URL")
|
||||
.interact_text()?;
|
||||
Some(url)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
manager.set_llm_provider(provider.clone());
|
||||
manager.set_llm_model(model);
|
||||
manager.set_llm_base_url(base_url);
|
||||
|
||||
if let Some(key) = api_key {
|
||||
if provider_needs_api_key(&provider) {
|
||||
manager.set_api_key(&key)?;
|
||||
println!("\n{} {}", "✓".green(), "API key stored securely in system keyring.".green());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -270,10 +270,8 @@ impl TagCommand {
|
||||
|
||||
async fn generate_tag_message(&self, repo: &GitRepo, version: &str, messages: &Messages) -> Result<String> {
|
||||
let manager = ConfigManager::new()?;
|
||||
let config = manager.config();
|
||||
let language = manager.get_language().unwrap_or(Language::English);
|
||||
|
||||
// Get commits since last tag
|
||||
let tags = repo.get_tags()?;
|
||||
let commits = if let Some(latest_tag) = tags.first() {
|
||||
repo.get_commits_between(&latest_tag.name, "HEAD")?
|
||||
@@ -287,7 +285,7 @@ impl TagCommand {
|
||||
|
||||
println!("{}", messages.ai_generating_tag(commits.len()));
|
||||
|
||||
let generator = ContentGenerator::new(&config.llm).await?;
|
||||
let generator = ContentGenerator::new(&manager).await?;
|
||||
generator.generate_tag_message(version, &commits, language).await
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use super::{AppConfig, GitProfile, TokenConfig};
|
||||
use crate::utils::keyring::{KeyringManager, get_default_base_url, get_default_model, provider_needs_api_key};
|
||||
use anyhow::{bail, Context, Result};
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
@@ -8,6 +9,7 @@ pub struct ConfigManager {
|
||||
config: AppConfig,
|
||||
config_path: PathBuf,
|
||||
modified: bool,
|
||||
keyring: KeyringManager,
|
||||
}
|
||||
|
||||
impl ConfigManager {
|
||||
@@ -28,6 +30,7 @@ impl ConfigManager {
|
||||
config,
|
||||
config_path: path.to_path_buf(),
|
||||
modified: false,
|
||||
keyring: KeyringManager::new(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -37,6 +40,7 @@ impl ConfigManager {
|
||||
config: AppConfig::default(),
|
||||
config_path: path.to_path_buf(),
|
||||
modified: true,
|
||||
keyring: KeyringManager::new(),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -262,96 +266,140 @@ impl ConfigManager {
|
||||
|
||||
/// Set LLM provider
|
||||
pub fn set_llm_provider(&mut self, provider: String) {
|
||||
self.config.llm.provider = provider;
|
||||
let default_model = get_default_model(&provider);
|
||||
self.config.llm.provider = provider.clone();
|
||||
if self.config.llm.model.is_empty() || self.config.llm.model == "llama2" {
|
||||
self.config.llm.model = default_model.to_string();
|
||||
}
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get OpenAI API key
|
||||
pub fn openai_api_key(&self) -> Option<&String> {
|
||||
self.config.llm.openai.api_key.as_ref()
|
||||
/// Get model
|
||||
pub fn llm_model(&self) -> &str {
|
||||
&self.config.llm.model
|
||||
}
|
||||
|
||||
/// Set OpenAI API key
|
||||
pub fn set_openai_api_key(&mut self, key: String) {
|
||||
self.config.llm.openai.api_key = Some(key);
|
||||
/// Set model
|
||||
pub fn set_llm_model(&mut self, model: String) {
|
||||
self.config.llm.model = model;
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get Anthropic API key
|
||||
pub fn anthropic_api_key(&self) -> Option<&String> {
|
||||
self.config.llm.anthropic.api_key.as_ref()
|
||||
/// Get base URL (returns provider default if not set)
|
||||
pub fn llm_base_url(&self) -> String {
|
||||
match &self.config.llm.base_url {
|
||||
Some(url) => url.clone(),
|
||||
None => get_default_base_url(&self.config.llm.provider).to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Set Anthropic API key
|
||||
pub fn set_anthropic_api_key(&mut self, key: String) {
|
||||
self.config.llm.anthropic.api_key = Some(key);
|
||||
/// Set base URL
|
||||
pub fn set_llm_base_url(&mut self, url: Option<String>) {
|
||||
self.config.llm.base_url = url;
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get Kimi API key
|
||||
pub fn kimi_api_key(&self) -> Option<&String> {
|
||||
self.config.llm.kimi.api_key.as_ref()
|
||||
/// Get API key from configured storage method
|
||||
pub fn get_api_key(&self) -> Option<String> {
|
||||
// First try environment variables (always checked)
|
||||
if let Some(key) = self.keyring.get_api_key(&self.config.llm.provider).unwrap_or(None) {
|
||||
return Some(key);
|
||||
}
|
||||
|
||||
// Then try config file if configured
|
||||
if self.config.llm.api_key_storage == "config" {
|
||||
return self.config.llm.api_key.clone();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Set Kimi API key
|
||||
pub fn set_kimi_api_key(&mut self, key: String) {
|
||||
self.config.llm.kimi.api_key = Some(key);
|
||||
self.modified = true;
|
||||
/// Store API key in configured storage method
|
||||
pub fn set_api_key(&self, api_key: &str) -> Result<()> {
|
||||
match self.config.llm.api_key_storage.as_str() {
|
||||
"keyring" => {
|
||||
if !self.keyring.is_available() {
|
||||
bail!("Keyring is not available. Set QUICOMMIT_API_KEY environment variable instead or change api_key_storage to 'config'.");
|
||||
}
|
||||
self.keyring.store_api_key(&self.config.llm.provider, api_key)
|
||||
},
|
||||
"config" => {
|
||||
// We can't modify self.config here since self is immutable
|
||||
// This will be handled by the caller updating the config
|
||||
Ok(())
|
||||
},
|
||||
"environment" => {
|
||||
bail!("API key storage set to 'environment'. Please set QUICOMMIT_{}_API_KEY environment variable.", self.config.llm.provider.to_uppercase());
|
||||
},
|
||||
_ => {
|
||||
bail!("Invalid API key storage method: {}", self.config.llm.api_key_storage);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get Kimi base URL
|
||||
pub fn kimi_base_url(&self) -> &str {
|
||||
&self.config.llm.kimi.base_url
|
||||
/// Delete API key from configured storage method
|
||||
pub fn delete_api_key(&self) -> Result<()> {
|
||||
match self.config.llm.api_key_storage.as_str() {
|
||||
"keyring" => {
|
||||
if self.keyring.is_available() {
|
||||
self.keyring.delete_api_key(&self.config.llm.provider)?;
|
||||
}
|
||||
},
|
||||
"config" => {
|
||||
// We can't modify self.config here since self is immutable
|
||||
// This will be handled by the caller updating the config
|
||||
},
|
||||
"environment" => {
|
||||
// Environment variables are not managed by the app
|
||||
},
|
||||
_ => {
|
||||
bail!("Invalid API key storage method: {}", self.config.llm.api_key_storage);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Set Kimi base URL
|
||||
pub fn set_kimi_base_url(&mut self, url: String) {
|
||||
self.config.llm.kimi.base_url = url;
|
||||
self.modified = true;
|
||||
/// Check if API key is configured
|
||||
pub fn has_api_key(&self) -> bool {
|
||||
if !provider_needs_api_key(&self.config.llm.provider) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check environment variables
|
||||
if self.keyring.get_api_key(&self.config.llm.provider).unwrap_or(None).is_some() {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check config file if configured
|
||||
if self.config.llm.api_key_storage == "config" {
|
||||
return self.config.llm.api_key.is_some();
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Get DeepSeek API key
|
||||
pub fn deepseek_api_key(&self) -> Option<&String> {
|
||||
self.config.llm.deepseek.api_key.as_ref()
|
||||
/// Get keyring manager reference
|
||||
pub fn keyring(&self) -> &KeyringManager {
|
||||
&self.keyring
|
||||
}
|
||||
|
||||
/// Set DeepSeek API key
|
||||
pub fn set_deepseek_api_key(&mut self, key: String) {
|
||||
self.config.llm.deepseek.api_key = Some(key);
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get DeepSeek base URL
|
||||
pub fn deepseek_base_url(&self) -> &str {
|
||||
&self.config.llm.deepseek.base_url
|
||||
}
|
||||
|
||||
/// Set DeepSeek base URL
|
||||
pub fn set_deepseek_base_url(&mut self, url: String) {
|
||||
self.config.llm.deepseek.base_url = url;
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get OpenRouter API key
|
||||
pub fn openrouter_api_key(&self) -> Option<&String> {
|
||||
self.config.llm.openrouter.api_key.as_ref()
|
||||
}
|
||||
|
||||
/// Set OpenRouter API key
|
||||
pub fn set_openrouter_api_key(&mut self, key: String) {
|
||||
self.config.llm.openrouter.api_key = Some(key);
|
||||
self.modified = true;
|
||||
}
|
||||
|
||||
/// Get OpenRouter base URL
|
||||
pub fn openrouter_base_url(&self) -> &str {
|
||||
&self.config.llm.openrouter.base_url
|
||||
}
|
||||
|
||||
/// Set OpenRouter base URL
|
||||
pub fn set_openrouter_base_url(&mut self, url: String) {
|
||||
self.config.llm.openrouter.base_url = url;
|
||||
self.modified = true;
|
||||
/// Configure LLM provider with all settings
|
||||
pub fn configure_llm(&mut self, provider: String, model: Option<String>, base_url: Option<String>, api_key: Option<&str>) -> Result<()> {
|
||||
self.set_llm_provider(provider.clone());
|
||||
|
||||
if let Some(m) = model {
|
||||
self.set_llm_model(m);
|
||||
}
|
||||
|
||||
self.set_llm_base_url(base_url);
|
||||
|
||||
if let Some(key) = api_key {
|
||||
if provider_needs_api_key(&provider) {
|
||||
self.set_api_key(key)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Commit configuration
|
||||
@@ -471,6 +519,7 @@ impl Default for ConfigManager {
|
||||
config: AppConfig::default(),
|
||||
config_path: PathBuf::new(),
|
||||
modified: false,
|
||||
keyring: KeyringManager::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,37 +80,16 @@ impl Default for AppConfig {
|
||||
/// LLM configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LlmConfig {
|
||||
/// Default LLM provider
|
||||
/// Current LLM provider (ollama, openai, anthropic, kimi, deepseek, openrouter)
|
||||
#[serde(default = "default_llm_provider")]
|
||||
pub provider: String,
|
||||
|
||||
/// OpenAI configuration
|
||||
#[serde(default)]
|
||||
pub openai: OpenAiConfig,
|
||||
/// Model to use (stored in config, not in keyring)
|
||||
#[serde(default = "default_model")]
|
||||
pub model: String,
|
||||
|
||||
/// Ollama configuration
|
||||
#[serde(default)]
|
||||
pub ollama: OllamaConfig,
|
||||
|
||||
/// Anthropic Claude configuration
|
||||
#[serde(default)]
|
||||
pub anthropic: AnthropicConfig,
|
||||
|
||||
/// Kimi (Moonshot AI) configuration
|
||||
#[serde(default)]
|
||||
pub kimi: KimiConfig,
|
||||
|
||||
/// DeepSeek configuration
|
||||
#[serde(default)]
|
||||
pub deepseek: DeepSeekConfig,
|
||||
|
||||
/// OpenRouter configuration
|
||||
#[serde(default)]
|
||||
pub openrouter: OpenRouterConfig,
|
||||
|
||||
/// Custom API configuration
|
||||
#[serde(default)]
|
||||
pub custom: Option<CustomLlmConfig>,
|
||||
/// API base URL (optional, will use provider default if not set)
|
||||
pub base_url: Option<String>,
|
||||
|
||||
/// Maximum tokens for generation
|
||||
#[serde(default = "default_max_tokens")]
|
||||
@@ -123,186 +102,35 @@ pub struct LlmConfig {
|
||||
/// Timeout in seconds
|
||||
#[serde(default = "default_timeout")]
|
||||
pub timeout: u64,
|
||||
|
||||
/// API key storage method (keyring, config, environment)
|
||||
#[serde(default = "default_api_key_storage")]
|
||||
pub api_key_storage: String,
|
||||
|
||||
/// API key (stored in config for fallback, encrypted if encrypt_sensitive is true)
|
||||
#[serde(default)]
|
||||
pub api_key: Option<String>,
|
||||
}
|
||||
|
||||
fn default_api_key_storage() -> String {
|
||||
"keyring".to_string()
|
||||
}
|
||||
|
||||
impl Default for LlmConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider: default_llm_provider(),
|
||||
openai: OpenAiConfig::default(),
|
||||
ollama: OllamaConfig::default(),
|
||||
anthropic: AnthropicConfig::default(),
|
||||
kimi: KimiConfig::default(),
|
||||
deepseek: DeepSeekConfig::default(),
|
||||
openrouter: OpenRouterConfig::default(),
|
||||
custom: None,
|
||||
model: default_model(),
|
||||
base_url: None,
|
||||
max_tokens: default_max_tokens(),
|
||||
temperature: default_temperature(),
|
||||
timeout: default_timeout(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// OpenAI API configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct OpenAiConfig {
|
||||
/// API key
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_openai_model")]
|
||||
pub model: String,
|
||||
|
||||
/// API base URL (for custom endpoints)
|
||||
#[serde(default = "default_openai_base_url")]
|
||||
pub base_url: String,
|
||||
}
|
||||
|
||||
impl Default for OpenAiConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
api_key_storage: default_api_key_storage(),
|
||||
api_key: None,
|
||||
model: default_openai_model(),
|
||||
base_url: default_openai_base_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Ollama configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct OllamaConfig {
|
||||
/// Ollama server URL
|
||||
#[serde(default = "default_ollama_url")]
|
||||
pub url: String,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_ollama_model")]
|
||||
pub model: String,
|
||||
}
|
||||
|
||||
impl Default for OllamaConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
url: default_ollama_url(),
|
||||
model: default_ollama_model(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Anthropic Claude configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AnthropicConfig {
|
||||
/// API key
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_anthropic_model")]
|
||||
pub model: String,
|
||||
}
|
||||
|
||||
impl Default for AnthropicConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
api_key: None,
|
||||
model: default_anthropic_model(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Kimi (Moonshot AI) configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct KimiConfig {
|
||||
/// API key
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_kimi_model")]
|
||||
pub model: String,
|
||||
|
||||
/// API base URL (for custom endpoints)
|
||||
#[serde(default = "default_kimi_base_url")]
|
||||
pub base_url: String,
|
||||
}
|
||||
|
||||
impl Default for KimiConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
api_key: None,
|
||||
model: default_kimi_model(),
|
||||
base_url: default_kimi_base_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// DeepSeek configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DeepSeekConfig {
|
||||
/// API key
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_deepseek_model")]
|
||||
pub model: String,
|
||||
|
||||
/// API base URL (for custom endpoints)
|
||||
#[serde(default = "default_deepseek_base_url")]
|
||||
pub base_url: String,
|
||||
}
|
||||
|
||||
impl Default for DeepSeekConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
api_key: None,
|
||||
model: default_deepseek_model(),
|
||||
base_url: default_deepseek_base_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// OpenRouter configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct OpenRouterConfig {
|
||||
/// API key
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model to use
|
||||
#[serde(default = "default_openrouter_model")]
|
||||
pub model: String,
|
||||
|
||||
/// API base URL (for custom endpoints)
|
||||
#[serde(default = "default_openrouter_base_url")]
|
||||
pub base_url: String,
|
||||
}
|
||||
|
||||
impl Default for OpenRouterConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
api_key: None,
|
||||
model: default_openrouter_model(),
|
||||
base_url: default_openrouter_base_url(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Custom LLM API configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CustomLlmConfig {
|
||||
/// API endpoint URL
|
||||
pub url: String,
|
||||
|
||||
/// API key (optional)
|
||||
pub api_key: Option<String>,
|
||||
|
||||
/// Model name
|
||||
pub model: String,
|
||||
|
||||
/// Request format template (JSON)
|
||||
pub request_template: String,
|
||||
|
||||
/// Response path to extract content (e.g., "choices.0.message.content")
|
||||
pub response_path: String,
|
||||
}
|
||||
|
||||
/// Commit configuration
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CommitConfig {
|
||||
@@ -592,6 +420,10 @@ fn default_llm_provider() -> String {
|
||||
"ollama".to_string()
|
||||
}
|
||||
|
||||
fn default_model() -> String {
|
||||
"llama2".to_string()
|
||||
}
|
||||
|
||||
fn default_max_tokens() -> u32 {
|
||||
500
|
||||
}
|
||||
@@ -604,50 +436,6 @@ fn default_timeout() -> u64 {
|
||||
30
|
||||
}
|
||||
|
||||
fn default_openai_model() -> String {
|
||||
"gpt-4".to_string()
|
||||
}
|
||||
|
||||
fn default_openai_base_url() -> String {
|
||||
"https://api.openai.com/v1".to_string()
|
||||
}
|
||||
|
||||
fn default_ollama_url() -> String {
|
||||
"http://localhost:11434".to_string()
|
||||
}
|
||||
|
||||
fn default_ollama_model() -> String {
|
||||
"llama2".to_string()
|
||||
}
|
||||
|
||||
fn default_anthropic_model() -> String {
|
||||
"claude-3-sonnet-20240229".to_string()
|
||||
}
|
||||
|
||||
fn default_kimi_model() -> String {
|
||||
"moonshot-v1-8k".to_string()
|
||||
}
|
||||
|
||||
fn default_kimi_base_url() -> String {
|
||||
"https://api.moonshot.cn/v1".to_string()
|
||||
}
|
||||
|
||||
fn default_deepseek_model() -> String {
|
||||
"deepseek-chat".to_string()
|
||||
}
|
||||
|
||||
fn default_deepseek_base_url() -> String {
|
||||
"https://api.deepseek.com/v1".to_string()
|
||||
}
|
||||
|
||||
fn default_openrouter_model() -> String {
|
||||
"openai/gpt-3.5-turbo".to_string()
|
||||
}
|
||||
|
||||
fn default_openrouter_base_url() -> String {
|
||||
"https://openrouter.ai/api/v1".to_string()
|
||||
}
|
||||
|
||||
fn default_commit_format() -> CommitFormat {
|
||||
CommitFormat::Conventional
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use crate::config::{CommitFormat, LlmConfig, Language};
|
||||
use crate::config::{CommitFormat, Language};
|
||||
use crate::config::manager::ConfigManager;
|
||||
use crate::git::{CommitInfo, GitRepo};
|
||||
use crate::llm::{GeneratedCommit, LlmClient};
|
||||
use anyhow::{Context, Result};
|
||||
@@ -10,12 +11,11 @@ pub struct ContentGenerator {
|
||||
|
||||
impl ContentGenerator {
|
||||
/// Create new content generator
|
||||
pub async fn new(config: &LlmConfig) -> Result<Self> {
|
||||
let llm_client = LlmClient::from_config(config).await?;
|
||||
pub async fn new(manager: &ConfigManager) -> Result<Self> {
|
||||
let llm_client = LlmClient::from_config(manager).await?;
|
||||
|
||||
// Check if provider is available
|
||||
if !llm_client.is_available().await {
|
||||
anyhow::bail!("LLM provider '{}' is not available", config.provider);
|
||||
anyhow::bail!("LLM provider '{}' is not available", manager.llm_provider());
|
||||
}
|
||||
|
||||
Ok(Self { llm_client })
|
||||
|
||||
@@ -57,48 +57,50 @@ impl Default for LlmClientConfig {
|
||||
}
|
||||
|
||||
impl LlmClient {
|
||||
/// Create LLM client from configuration
|
||||
pub async fn from_config(config: &crate::config::LlmConfig) -> Result<Self> {
|
||||
/// Create LLM client from configuration manager
|
||||
pub async fn from_config(manager: &crate::config::manager::ConfigManager) -> Result<Self> {
|
||||
let config = manager.config();
|
||||
let client_config = LlmClientConfig {
|
||||
max_tokens: config.max_tokens,
|
||||
temperature: config.temperature,
|
||||
timeout: Duration::from_secs(config.timeout),
|
||||
max_tokens: config.llm.max_tokens,
|
||||
temperature: config.llm.temperature,
|
||||
timeout: Duration::from_secs(config.llm.timeout),
|
||||
};
|
||||
|
||||
let provider: Box<dyn LlmProvider> = match config.provider.as_str() {
|
||||
let provider = config.llm.provider.as_str();
|
||||
let model = config.llm.model.as_str();
|
||||
let base_url = manager.llm_base_url();
|
||||
let api_key = manager.get_api_key();
|
||||
|
||||
let provider: Box<dyn LlmProvider> = match provider {
|
||||
"ollama" => {
|
||||
Box::new(OllamaClient::new(&config.ollama.url, &config.ollama.model))
|
||||
Box::new(OllamaClient::new(&base_url, model))
|
||||
}
|
||||
"openai" => {
|
||||
let api_key = config.openai.api_key.as_ref()
|
||||
let key = api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("OpenAI API key not configured"))?;
|
||||
Box::new(OpenAiClient::new(
|
||||
&config.openai.base_url,
|
||||
api_key,
|
||||
&config.openai.model,
|
||||
)?)
|
||||
Box::new(OpenAiClient::new(&base_url, key, model)?)
|
||||
}
|
||||
"anthropic" => {
|
||||
let api_key = config.anthropic.api_key.as_ref()
|
||||
let key = api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("Anthropic API key not configured"))?;
|
||||
Box::new(AnthropicClient::new(api_key, &config.anthropic.model)?)
|
||||
Box::new(AnthropicClient::new(key, model)?)
|
||||
}
|
||||
"kimi" => {
|
||||
let api_key = config.kimi.api_key.as_ref()
|
||||
let key = api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("Kimi API key not configured"))?;
|
||||
Box::new(KimiClient::with_base_url(api_key, &config.kimi.model, &config.kimi.base_url)?)
|
||||
Box::new(KimiClient::with_base_url(key, model, &base_url)?)
|
||||
}
|
||||
"deepseek" => {
|
||||
let api_key = config.deepseek.api_key.as_ref()
|
||||
let key = api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("DeepSeek API key not configured"))?;
|
||||
Box::new(DeepSeekClient::with_base_url(api_key, &config.deepseek.model, &config.deepseek.base_url)?)
|
||||
Box::new(DeepSeekClient::with_base_url(key, model, &base_url)?)
|
||||
}
|
||||
"openrouter" => {
|
||||
let api_key = config.openrouter.api_key.as_ref()
|
||||
let key = api_key.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("OpenRouter API key not configured"))?;
|
||||
Box::new(OpenRouterClient::with_base_url(api_key, &config.openrouter.model, &config.openrouter.base_url)?)
|
||||
Box::new(OpenRouterClient::with_base_url(key, model, &base_url)?)
|
||||
}
|
||||
_ => bail!("Unknown LLM provider: {}", config.provider),
|
||||
_ => bail!("Unknown LLM provider: {}", provider),
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
@@ -1012,3 +1014,10 @@ Gruppieren Sie Commits nach:
|
||||
|
||||
Formatieren Sie in Markdown mit geeigneten Überschriften und Aufzählungspunkten.
|
||||
"#;
|
||||
|
||||
/// Test LLM connection
|
||||
pub async fn test_connection(manager: &crate::config::manager::ConfigManager) -> Result<String> {
|
||||
let client = LlmClient::from_config(manager).await?;
|
||||
let response = client.provider.generate("Say 'Hello, World!'").await?;
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
219
src/utils/keyring.rs
Normal file
219
src/utils/keyring.rs
Normal file
@@ -0,0 +1,219 @@
|
||||
use anyhow::{bail, Context, Result};
|
||||
use std::env;
|
||||
|
||||
/// Service name under which API keys are stored in the OS keyring.
const SERVICE_NAME: &str = "quicommit";
/// Environment variable that, when set and non-empty, overrides any keyring-stored API key.
const ENV_API_KEY: &str = "QUICOMMIT_API_KEY";
|
||||
|
||||
/// Whether an OS-level credential store can be used on this system.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum KeyringStatus {
    /// A system keyring (Credential Manager / Keychain / Secret Service) appears usable.
    Available,
    /// No usable keyring; callers must fall back to the QUICOMMIT_API_KEY env var.
    Unavailable,
}
|
||||
|
||||
/// Thin wrapper around the system keyring; availability is probed once at construction.
pub struct KeyringManager {
    // Cached result of the availability probe performed in `new()`.
    status: KeyringStatus,
}
|
||||
|
||||
impl KeyringManager {
|
||||
pub fn new() -> Self {
|
||||
let status = Self::check_keyring_availability();
|
||||
Self { status }
|
||||
}
|
||||
|
||||
pub fn check_keyring_availability() -> KeyringStatus {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
KeyringStatus::Available
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
KeyringStatus::Available
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
Self::check_linux_keyring()
|
||||
}
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
|
||||
{
|
||||
KeyringStatus::Unavailable
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
fn check_linux_keyring() -> KeyringStatus {
|
||||
use std::path::Path;
|
||||
|
||||
let has_dbus = Path::new("/usr/bin/dbus-daemon").exists()
|
||||
|| Path::new("/bin/dbus-daemon").exists()
|
||||
|| env::var("DBUS_SESSION_BUS_ADDRESS").is_ok();
|
||||
|
||||
let has_keyring = Path::new("/usr/bin/gnome-keyring-daemon").exists()
|
||||
|| Path::new("/usr/bin/gnome-keyring").exists()
|
||||
|| Path::new("/usr/bin/kwalletd5").exists()
|
||||
|| Path::new("/usr/bin/kwalletd6").exists()
|
||||
|| env::var("SECRET_SERVICE").is_ok();
|
||||
|
||||
if has_dbus && has_keyring {
|
||||
KeyringStatus::Available
|
||||
} else {
|
||||
KeyringStatus::Unavailable
|
||||
}
|
||||
}
|
||||
|
||||
pub fn status(&self) -> KeyringStatus {
|
||||
self.status
|
||||
}
|
||||
|
||||
pub fn is_available(&self) -> bool {
|
||||
self.status == KeyringStatus::Available
|
||||
}
|
||||
|
||||
pub fn store_api_key(&self, provider: &str, api_key: &str) -> Result<()> {
|
||||
if !self.is_available() {
|
||||
bail!("Keyring is not available on this system");
|
||||
}
|
||||
|
||||
let entry = keyring::Entry::new(SERVICE_NAME, provider)
|
||||
.context("Failed to create keyring entry")?;
|
||||
|
||||
entry.set_password(api_key)
|
||||
.context("Failed to store API key")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_api_key(&self, provider: &str) -> Result<Option<String>> {
|
||||
// 优先从环境变量获取
|
||||
if let Ok(key) = env::var(ENV_API_KEY) {
|
||||
if !key.is_empty() {
|
||||
return Ok(Some(key));
|
||||
}
|
||||
}
|
||||
|
||||
// keyring 不可用时直接返回
|
||||
if !self.is_available() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// 从 keyring 获取
|
||||
let entry = keyring::Entry::new(SERVICE_NAME, provider)
|
||||
.context("Failed to create keyring entry")?;
|
||||
|
||||
match entry.get_password() {
|
||||
Ok(key) => Ok(Some(key)),
|
||||
Err(keyring::Error::NoEntry) => Ok(None),
|
||||
Err(e) => Err(e.into()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delete_api_key(&self, provider: &str) -> Result<()> {
|
||||
if !self.is_available() {
|
||||
bail!("Keyring is not available on this system");
|
||||
}
|
||||
|
||||
let entry = keyring::Entry::new(SERVICE_NAME, provider)
|
||||
.context("Failed to create keyring entry")?;
|
||||
|
||||
entry.delete_credential()
|
||||
.context("Failed to delete API key")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn has_api_key(&self, provider: &str) -> bool {
|
||||
self.get_api_key(provider).unwrap_or(None).is_some()
|
||||
}
|
||||
|
||||
pub fn get_status_message(&self) -> String {
|
||||
match self.status {
|
||||
KeyringStatus::Available => {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
"Windows Credential Manager is available".to_string()
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
"macOS Keychain is available".to_string()
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
"Linux secret service is available".to_string()
|
||||
}
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
|
||||
{
|
||||
"Keyring is available".to_string()
|
||||
}
|
||||
}
|
||||
KeyringStatus::Unavailable => {
|
||||
"Keyring is not available. Set QUICOMMIT_API_KEY environment variable.".to_string()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for KeyringManager {
    /// Equivalent to `KeyringManager::new()` — probes keyring availability.
    fn default() -> Self {
        Self::new()
    }
}
|
||||
|
||||
/// Default API base URL for a known `provider`; empty string for unknown ones.
pub fn get_default_base_url(provider: &str) -> &'static str {
    // Lookup table of (provider, default base URL) pairs.
    const URLS: [(&str, &str); 6] = [
        ("openai", "https://api.openai.com/v1"),
        ("anthropic", "https://api.anthropic.com/v1"),
        ("kimi", "https://api.moonshot.cn/v1"),
        ("deepseek", "https://api.deepseek.com/v1"),
        ("openrouter", "https://openrouter.ai/api/v1"),
        ("ollama", "http://localhost:11434"),
    ];
    URLS.iter()
        .copied()
        .find(|&(name, _)| name == provider)
        .map_or("", |(_, url)| url)
}
|
||||
|
||||
/// Default model identifier for a known `provider`; empty string for unknown ones.
pub fn get_default_model(provider: &str) -> &'static str {
    // Lookup table of (provider, default model) pairs.
    const MODELS: [(&str, &str); 6] = [
        ("openai", "gpt-4"),
        ("anthropic", "claude-3-sonnet-20240229"),
        ("kimi", "moonshot-v1-8k"),
        ("deepseek", "deepseek-chat"),
        ("openrouter", "openai/gpt-3.5-turbo"),
        ("ollama", "llama2"),
    ];
    MODELS.iter()
        .copied()
        .find(|&(name, _)| name == provider)
        .map_or("", |(_, model)| model)
}
|
||||
|
||||
/// Providers recognized for configuration and keyring storage.
pub fn get_supported_providers() -> &'static [&'static str] {
    &["ollama", "openai", "anthropic", "kimi", "deepseek", "openrouter"]
}
|
||||
|
||||
/// Whether `provider` requires an API key — every provider except local ollama.
pub fn provider_needs_api_key(provider: &str) -> bool {
    !matches!(provider, "ollama")
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_default_base_url() {
        assert_eq!(get_default_base_url("openai"), "https://api.openai.com/v1");
        assert_eq!(get_default_base_url("anthropic"), "https://api.anthropic.com/v1");
        assert_eq!(get_default_base_url("kimi"), "https://api.moonshot.cn/v1");
        assert_eq!(get_default_base_url("deepseek"), "https://api.deepseek.com/v1");
        assert_eq!(get_default_base_url("openrouter"), "https://openrouter.ai/api/v1");
        assert_eq!(get_default_base_url("ollama"), "http://localhost:11434");
        // Unknown providers fall through to an empty default.
        assert_eq!(get_default_base_url("unknown"), "");
    }

    #[test]
    fn test_get_default_model() {
        assert_eq!(get_default_model("openai"), "gpt-4");
        assert_eq!(get_default_model("anthropic"), "claude-3-sonnet-20240229");
        assert_eq!(get_default_model("ollama"), "llama2");
        // Unknown providers fall through to an empty default.
        assert_eq!(get_default_model("unknown"), "");
    }

    #[test]
    fn test_provider_needs_api_key() {
        assert!(provider_needs_api_key("openai"));
        assert!(provider_needs_api_key("anthropic"));
        assert!(!provider_needs_api_key("ollama"));
    }

    #[test]
    fn test_supported_providers_have_defaults() {
        // Every advertised provider must have a non-empty base URL and model,
        // so the configuration wizard never offers an unusable default.
        for provider in get_supported_providers() {
            assert!(!get_default_base_url(provider).is_empty(), "missing base URL for {provider}");
            assert!(!get_default_model(provider).is_empty(), "missing model for {provider}");
        }
    }
}
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod crypto;
|
||||
pub mod editor;
|
||||
pub mod formatter;
|
||||
pub mod keyring;
|
||||
pub mod validators;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
|
||||
Reference in New Issue
Block a user