feat(keyring): 集成系统密钥环安全存储 API key

This commit is contained in:
2026-03-12 17:42:41 +08:00
parent c66d782eab
commit da85fc94b1
17 changed files with 990 additions and 1024 deletions

View File

@@ -80,37 +80,16 @@ impl Default for AppConfig {
/// LLM configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct LlmConfig {
/// Default LLM provider
/// Current LLM provider (ollama, openai, anthropic, kimi, deepseek, openrouter)
#[serde(default = "default_llm_provider")]
pub provider: String,
/// OpenAI configuration
#[serde(default)]
pub openai: OpenAiConfig,
/// Model to use (stored in config, not in keyring)
#[serde(default = "default_model")]
pub model: String,
/// Ollama configuration
#[serde(default)]
pub ollama: OllamaConfig,
/// Anthropic Claude configuration
#[serde(default)]
pub anthropic: AnthropicConfig,
/// Kimi (Moonshot AI) configuration
#[serde(default)]
pub kimi: KimiConfig,
/// DeepSeek configuration
#[serde(default)]
pub deepseek: DeepSeekConfig,
/// OpenRouter configuration
#[serde(default)]
pub openrouter: OpenRouterConfig,
/// Custom API configuration
#[serde(default)]
pub custom: Option<CustomLlmConfig>,
/// API base URL (optional, will use provider default if not set)
pub base_url: Option<String>,
/// Maximum tokens for generation
#[serde(default = "default_max_tokens")]
@@ -123,186 +102,35 @@ pub struct LlmConfig {
/// Timeout in seconds
#[serde(default = "default_timeout")]
pub timeout: u64,
/// API key storage method (keyring, config, environment)
#[serde(default = "default_api_key_storage")]
pub api_key_storage: String,
/// API key (stored in config for fallback, encrypted if encrypt_sensitive is true)
#[serde(default)]
pub api_key: Option<String>,
}
/// Default backend for API-key storage: the OS keyring ("keyring").
fn default_api_key_storage() -> String {
    String::from("keyring")
}
impl Default for LlmConfig {
fn default() -> Self {
Self {
provider: default_llm_provider(),
openai: OpenAiConfig::default(),
ollama: OllamaConfig::default(),
anthropic: AnthropicConfig::default(),
kimi: KimiConfig::default(),
deepseek: DeepSeekConfig::default(),
openrouter: OpenRouterConfig::default(),
custom: None,
model: default_model(),
base_url: None,
max_tokens: default_max_tokens(),
temperature: default_temperature(),
timeout: default_timeout(),
}
}
}
/// OpenAI API configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenAiConfig {
    /// API key; None when no key is kept in the config file
    /// (e.g. stored in the system keyring — see LlmConfig::api_key_storage)
    pub api_key: Option<String>,
    /// Model to use (default: "gpt-4")
    #[serde(default = "default_openai_model")]
    pub model: String,
    /// API base URL; override for OpenAI-compatible custom endpoints
    /// (default: "https://api.openai.com/v1")
    #[serde(default = "default_openai_base_url")]
    pub base_url: String,
}
impl Default for OpenAiConfig {
    /// Stock OpenAI settings: no key in the config file, "gpt-4",
    /// and the official API endpoint.
    fn default() -> Self {
        // Bug fix: the original literal initialized `api_key_storage`, which
        // is not a field of OpenAiConfig (it lives on LlmConfig) — E0560.
        Self {
            api_key: None,
            model: default_openai_model(),
            base_url: default_openai_base_url(),
        }
    }
}
/// Ollama configuration (local LLM server; no API key field — Ollama
/// does not require one)
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaConfig {
    /// Base URL of the Ollama HTTP server (default: "http://localhost:11434")
    #[serde(default = "default_ollama_url")]
    pub url: String,
    /// Model to request from Ollama (default: "llama2")
    #[serde(default = "default_ollama_model")]
    pub model: String,
}
impl Default for OllamaConfig {
fn default() -> Self {
Self {
url: default_ollama_url(),
model: default_ollama_model(),
}
}
}
/// Anthropic Claude configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AnthropicConfig {
    /// API key; None when no key is kept in the config file
    /// (e.g. stored in the system keyring — see LlmConfig::api_key_storage)
    pub api_key: Option<String>,
    /// Model to use (default: "claude-3-sonnet-20240229")
    #[serde(default = "default_anthropic_model")]
    pub model: String,
}
impl Default for AnthropicConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_anthropic_model(),
}
}
}
/// Kimi (Moonshot AI) configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct KimiConfig {
    /// API key; None when no key is kept in the config file
    /// (e.g. stored in the system keyring — see LlmConfig::api_key_storage)
    pub api_key: Option<String>,
    /// Model to use (default: "moonshot-v1-8k")
    #[serde(default = "default_kimi_model")]
    pub model: String,
    /// API base URL; override for custom endpoints
    /// (default: "https://api.moonshot.cn/v1")
    #[serde(default = "default_kimi_base_url")]
    pub base_url: String,
}
impl Default for KimiConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_kimi_model(),
base_url: default_kimi_base_url(),
}
}
}
/// DeepSeek configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeepSeekConfig {
    /// API key; None when no key is kept in the config file
    /// (e.g. stored in the system keyring — see LlmConfig::api_key_storage)
    pub api_key: Option<String>,
    /// Model to use (default: "deepseek-chat")
    #[serde(default = "default_deepseek_model")]
    pub model: String,
    /// API base URL; override for custom endpoints
    /// (default: "https://api.deepseek.com/v1")
    #[serde(default = "default_deepseek_base_url")]
    pub base_url: String,
}
impl Default for DeepSeekConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_deepseek_model(),
base_url: default_deepseek_base_url(),
}
}
}
/// OpenRouter configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OpenRouterConfig {
    /// API key; None when no key is kept in the config file
    /// (e.g. stored in the system keyring — see LlmConfig::api_key_storage)
    pub api_key: Option<String>,
    /// Model to use, in OpenRouter's "vendor/model" form
    /// (default: "openai/gpt-3.5-turbo")
    #[serde(default = "default_openrouter_model")]
    pub model: String,
    /// API base URL; override for custom endpoints
    /// (default: "https://openrouter.ai/api/v1")
    #[serde(default = "default_openrouter_base_url")]
    pub base_url: String,
}
impl Default for OpenRouterConfig {
fn default() -> Self {
Self {
api_key: None,
model: default_openrouter_model(),
base_url: default_openrouter_base_url(),
}
}
}
/// Custom LLM API configuration, for providers not covered by the built-ins.
/// No field carries a serde default, so a custom entry must be specified
/// completely (LlmConfig::custom is an Option for "not configured").
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomLlmConfig {
    /// Full API endpoint URL for the request
    pub url: String,
    /// API key (optional; e.g. self-hosted endpoints may need none)
    pub api_key: Option<String>,
    /// Model name sent to the endpoint
    pub model: String,
    /// Request body template (JSON) used to build the outgoing request
    pub request_template: String,
    /// Dot-separated path to the generated text inside the response JSON,
    /// e.g. "choices.0.message.content"
    pub response_path: String,
}
/// Commit configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitConfig {
@@ -592,6 +420,10 @@ fn default_llm_provider() -> String {
"ollama".to_string()
}
/// Fallback model name when none is configured.
fn default_model() -> String {
    String::from("llama2")
}

/// Fallback cap on generated tokens when none is configured.
fn default_max_tokens() -> u32 {
    500
}
@@ -604,50 +436,6 @@ fn default_timeout() -> u64 {
30
}
/// Default model for the OpenAI provider.
fn default_openai_model() -> String {
    String::from("gpt-4")
}

/// Official OpenAI API endpoint.
fn default_openai_base_url() -> String {
    String::from("https://api.openai.com/v1")
}

/// Local Ollama server at its standard port.
fn default_ollama_url() -> String {
    String::from("http://localhost:11434")
}

/// Default model for the Ollama provider.
fn default_ollama_model() -> String {
    String::from("llama2")
}

/// Default model for the Anthropic provider.
fn default_anthropic_model() -> String {
    String::from("claude-3-sonnet-20240229")
}

/// Default model for the Kimi (Moonshot AI) provider.
fn default_kimi_model() -> String {
    String::from("moonshot-v1-8k")
}

/// Official Moonshot AI API endpoint.
fn default_kimi_base_url() -> String {
    String::from("https://api.moonshot.cn/v1")
}

/// Default model for the DeepSeek provider.
fn default_deepseek_model() -> String {
    String::from("deepseek-chat")
}

/// Official DeepSeek API endpoint.
fn default_deepseek_base_url() -> String {
    String::from("https://api.deepseek.com/v1")
}

/// Default model for the OpenRouter provider ("vendor/model" form).
fn default_openrouter_model() -> String {
    String::from("openai/gpt-3.5-turbo")
}

/// Official OpenRouter API endpoint.
fn default_openrouter_base_url() -> String {
    String::from("https://openrouter.ai/api/v1")
}
/// Default commit-message format: Conventional Commits.
fn default_commit_format() -> CommitFormat {
    CommitFormat::Conventional
}