use anyhow::{bail, Result};
use clap::{Parser, Subcommand};
use colored::Colorize;
use dialoguer::{Confirm, Input, Select};

use crate::config::{manager::ConfigManager, CommitFormat, Language};

/// Mask an API key for display: keys of 8 characters or fewer are replaced
/// entirely with asterisks; longer keys keep only their first and last four
/// characters.
fn mask_api_key(key: Option<&str>) -> String {
    match key {
        Some(k) => {
            if k.len() <= 8 {
                "*".repeat(k.len())
            } else {
                format!("{}***{}", &k[..4], &k[k.len() - 4..])
            }
        }
        None => "✗ not set".red().to_string(),
    }
}
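
// Illustrative check of the masking behaviour above. A minimal sketch: the
// sample keys are made up, and the `None` branch is not asserted because its
// output depends on whether `colored` emits ANSI codes in the test environment.
#[cfg(test)]
mod mask_api_key_tests {
    use super::mask_api_key;

    #[test]
    fn masks_short_and_long_keys() {
        // Keys of 8 characters or fewer are fully replaced with asterisks.
        assert_eq!(mask_api_key(Some("abcd1234")), "********");
        // Longer keys keep only the first and last four characters.
        assert_eq!(mask_api_key(Some("sk-abcdef12345678")), "sk-a***5678");
    }
}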

/// Manage configuration settings
#[derive(Parser)]
pub struct ConfigCommand {
    #[command(subcommand)]
    command: Option<ConfigSubcommand>,
}

#[derive(Subcommand)]
enum ConfigSubcommand {
    /// Show current configuration
    Show,

    /// List all configuration information (with masked API keys)
    List,

    /// Edit configuration file
    Edit,

    /// Set configuration value
    Set {
        /// Key (e.g., llm.provider, commit.format)
        key: String,
        /// Value
        value: String,
    },

    /// Get configuration value
    Get {
        /// Key
        key: String,
    },

    /// Set LLM provider
    SetLlm {
        /// Provider (ollama, openai, anthropic, kimi, deepseek, openrouter)
        #[arg(value_name = "PROVIDER")]
        provider: Option<String>,
    },

    /// Set OpenAI API key
    SetOpenAiKey {
        /// API key
        key: String,
    },

    /// Set Anthropic API key
    SetAnthropicKey {
        /// API key
        key: String,
    },

    /// Set Kimi API key
    SetKimiKey {
        /// API key
        key: String,
    },

    /// Set DeepSeek API key
    SetDeepSeekKey {
        /// API key
        key: String,
    },

    /// Set OpenRouter API key
    SetOpenRouterKey {
        /// API key
        key: String,
    },

    /// Configure Ollama settings
    SetOllama {
        /// Ollama server URL
        #[arg(short, long)]
        url: Option<String>,
        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },

    /// Configure Kimi settings
    SetKimi {
        /// API base URL (for custom endpoints)
        #[arg(short, long)]
        base_url: Option<String>,
        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },

    /// Configure DeepSeek settings
    SetDeepSeek {
        /// API base URL (for custom endpoints)
        #[arg(short, long)]
        base_url: Option<String>,
        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },

    /// Configure OpenRouter settings
    SetOpenRouter {
        /// API base URL (for custom endpoints)
        #[arg(short, long)]
        base_url: Option<String>,
        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },

    /// Set commit format
    SetCommitFormat {
        /// Format (conventional, commitlint)
        format: String,
    },

    /// Set version prefix for tags
    SetVersionPrefix {
        /// Prefix (e.g., 'v')
        prefix: String,
    },

    /// Set changelog path
    SetChangelogPath {
        /// Path
        path: String,
    },

    /// Set output language
    SetLanguage {
        /// Language code (en, zh, ja, ko, es, fr, de)
        language: Option<String>,
    },

    /// Set whether to keep commit types in English
    SetKeepTypesEnglish {
        /// Keep types in English (true/false)
        keep: bool,
    },

    /// Set whether to keep changelog types in English
    SetKeepChangelogTypesEnglish {
        /// Keep types in English (true/false)
        keep: bool,
    },

    /// Reset configuration to defaults
    Reset {
        /// Skip confirmation
        #[arg(short, long)]
        force: bool,
    },

    /// Export configuration
    Export {
        /// Output file (defaults to stdout)
        #[arg(short, long)]
        output: Option<String>,
    },

    /// Import configuration
    Import {
        /// Input file
        #[arg(short, long)]
        file: String,
    },

    /// List available LLM models
    ListModels,

    /// Test LLM connection
    TestLlm,
}
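
// Illustrative invocations of the subcommands above (a sketch only: `qc` is a
// hypothetical stand-in for the real binary name, and it is assumed that this
// command is mounted as a `config` subcommand and that clap's default
// kebab-case subcommand names are in effect):
//
//   qc config show
//   qc config set llm.provider openai
//   qc config set-llm                 # interactive provider selection
//   qc config set-ollama --url http://localhost:11434 --model llama2
//   qc config set-language zh
//   qc config export --output config.toml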

impl ConfigCommand {
    pub async fn execute(&self) -> Result<()> {
        match &self.command {
            Some(ConfigSubcommand::Show) => self.show_config().await,
            Some(ConfigSubcommand::List) => self.list_config().await,
            Some(ConfigSubcommand::Edit) => self.edit_config().await,
            Some(ConfigSubcommand::Set { key, value }) => self.set_value(key, value).await,
            Some(ConfigSubcommand::Get { key }) => self.get_value(key).await,
            Some(ConfigSubcommand::SetLlm { provider }) => self.set_llm(provider.as_deref()).await,
            Some(ConfigSubcommand::SetOpenAiKey { key }) => self.set_openai_key(key).await,
            Some(ConfigSubcommand::SetAnthropicKey { key }) => self.set_anthropic_key(key).await,
            Some(ConfigSubcommand::SetKimiKey { key }) => self.set_kimi_key(key).await,
            Some(ConfigSubcommand::SetDeepSeekKey { key }) => self.set_deepseek_key(key).await,
            Some(ConfigSubcommand::SetOpenRouterKey { key }) => self.set_openrouter_key(key).await,
            Some(ConfigSubcommand::SetOllama { url, model }) => self.set_ollama(url.as_deref(), model.as_deref()).await,
            Some(ConfigSubcommand::SetKimi { base_url, model }) => self.set_kimi(base_url.as_deref(), model.as_deref()).await,
            Some(ConfigSubcommand::SetDeepSeek { base_url, model }) => self.set_deepseek(base_url.as_deref(), model.as_deref()).await,
            Some(ConfigSubcommand::SetOpenRouter { base_url, model }) => self.set_openrouter(base_url.as_deref(), model.as_deref()).await,
            Some(ConfigSubcommand::SetCommitFormat { format }) => self.set_commit_format(format).await,
            Some(ConfigSubcommand::SetVersionPrefix { prefix }) => self.set_version_prefix(prefix).await,
            Some(ConfigSubcommand::SetChangelogPath { path }) => self.set_changelog_path(path).await,
            Some(ConfigSubcommand::SetLanguage { language }) => self.set_language(language.as_deref()).await,
            Some(ConfigSubcommand::SetKeepTypesEnglish { keep }) => self.set_keep_types_english(*keep).await,
            Some(ConfigSubcommand::SetKeepChangelogTypesEnglish { keep }) => self.set_keep_changelog_types_english(*keep).await,
            Some(ConfigSubcommand::Reset { force }) => self.reset(*force).await,
            Some(ConfigSubcommand::Export { output }) => self.export_config(output.as_deref()).await,
            Some(ConfigSubcommand::Import { file }) => self.import_config(file).await,
            Some(ConfigSubcommand::ListModels) => self.list_models().await,
            Some(ConfigSubcommand::TestLlm) => self.test_llm().await,
            None => self.show_config().await,
        }
    }

    /// Show the current configuration (summary view)
    async fn show_config(&self) -> Result<()> {
        let manager = ConfigManager::new()?;
        let config = manager.config();

        println!("{}", "\nQuiCommit Configuration".bold());
        println!("{}", "─".repeat(60));

        println!("\n{}", "General:".bold());
        println!("  Config file: {}", manager.path().display());
        println!("  Default profile: {}",
            config.default_profile.as_deref().unwrap_or("(none)").cyan());
        println!("  Profiles: {}", config.profiles.len());

        println!("\n{}", "LLM Configuration:".bold());
        println!("  Provider: {}", config.llm.provider.cyan());
        println!("  Max tokens: {}", config.llm.max_tokens);
        println!("  Temperature: {}", config.llm.temperature);
        println!("  Timeout: {}s", config.llm.timeout);

        match config.llm.provider.as_str() {
            "ollama" => {
                println!("    URL: {}", config.llm.ollama.url);
                println!("    Model: {}", config.llm.ollama.model.cyan());
            }
            "openai" => {
                println!("    Model: {}", config.llm.openai.model.cyan());
                println!("    Base URL: {}", config.llm.openai.base_url);
                println!("    API key: {}", mask_api_key(config.llm.openai.api_key.as_deref()));
            }
            "anthropic" => {
                println!("    Model: {}", config.llm.anthropic.model.cyan());
                println!("    API key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref()));
            }
            "kimi" => {
                println!("    Model: {}", config.llm.kimi.model.cyan());
                println!("    Base URL: {}", config.llm.kimi.base_url);
                println!("    API key: {}", mask_api_key(config.llm.kimi.api_key.as_deref()));
            }
            "deepseek" => {
                println!("    Model: {}", config.llm.deepseek.model.cyan());
                println!("    Base URL: {}", config.llm.deepseek.base_url);
                println!("    API key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref()));
            }
            "openrouter" => {
                println!("    Model: {}", config.llm.openrouter.model.cyan());
                println!("    Base URL: {}", config.llm.openrouter.base_url);
                println!("    API key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref()));
            }
            _ => {}
        }

        println!("\n{}", "Commit Configuration:".bold());
        println!("  Format: {}", config.commit.format.to_string().cyan());
        println!("  Auto-generate: {}", if config.commit.auto_generate { "yes".green() } else { "no".red() });
        println!("  GPG sign: {}", if config.commit.gpg_sign { "yes".green() } else { "no".red() });
        println!("  Max subject length: {}", config.commit.max_subject_length);

        println!("\n{}", "Tag Configuration:".bold());
        println!("  Version prefix: '{}'", config.tag.version_prefix);
        println!("  Auto-generate: {}", if config.tag.auto_generate { "yes".green() } else { "no".red() });
        println!("  GPG sign: {}", if config.tag.gpg_sign { "yes".green() } else { "no".red() });
        println!("  Include changelog: {}", if config.tag.include_changelog { "yes".green() } else { "no".red() });

        println!("\n{}", "Language Configuration:".bold());
        let language = manager.get_language().unwrap_or(Language::English);
        println!("  Output language: {}", language.display_name().cyan());
        println!("  Keep commit types in English: {}", if manager.keep_types_english() { "yes".green() } else { "no".red() });
        println!("  Keep changelog types in English: {}", if manager.keep_changelog_types_english() { "yes".green() } else { "no".red() });

        println!("\n{}", "Changelog Configuration:".bold());
        println!("  Path: {}", config.changelog.path);
        println!("  Auto-generate: {}", if config.changelog.auto_generate { "yes".green() } else { "no".red() });
        println!("  Include hashes: {}", if config.changelog.include_hashes { "yes".green() } else { "no".red() });
        println!("  Include authors: {}", if config.changelog.include_authors { "yes".green() } else { "no".red() });
        println!("  Group by type: {}", if config.changelog.group_by_type { "yes".green() } else { "no".red() });

        Ok(())
    }

    /// List all configuration information with masked API keys
    async fn list_config(&self) -> Result<()> {
        let manager = ConfigManager::new()?;
        let config = manager.config();

        println!("{}", "\nQuiCommit Configuration".bold());
        println!("{}", "═".repeat(80));

        println!("\n{}", "📁 General Configuration:".bold().blue());
        println!("  Config file: {}", manager.path().display());
        println!("  Default profile: {}",
            config.default_profile.as_deref().unwrap_or("(none)").cyan());
        println!("  Profiles: {} profile(s)", config.profiles.len());
        println!("  Repository mappings: {} mapping(s)", config.repo_profiles.len());

        println!("\n{}", "🤖 LLM Configuration:".bold().blue());
        println!("  Provider: {}", config.llm.provider.cyan());
        println!("  Max tokens: {}", config.llm.max_tokens);
        println!("  Temperature: {}", config.llm.temperature);
        println!("  Timeout: {}s", config.llm.timeout);

        println!("\n{}", "  LLM Provider Details:".dimmed());

        // OpenAI
        println!("  🔹 OpenAI:");
        println!("      Model: {}", config.llm.openai.model.cyan());
        println!("      Base URL: {}", config.llm.openai.base_url);
        println!("      API Key: {}", mask_api_key(config.llm.openai.api_key.as_deref()));

        // Anthropic
        println!("  🔹 Anthropic:");
        println!("      Model: {}", config.llm.anthropic.model.cyan());
        println!("      API Key: {}", mask_api_key(config.llm.anthropic.api_key.as_deref()));

        // Kimi
        println!("  🔹 Kimi (Moonshot AI):");
        println!("      Model: {}", config.llm.kimi.model.cyan());
        println!("      Base URL: {}", config.llm.kimi.base_url);
        println!("      API Key: {}", mask_api_key(config.llm.kimi.api_key.as_deref()));

        // DeepSeek
        println!("  🔹 DeepSeek:");
        println!("      Model: {}", config.llm.deepseek.model.cyan());
        println!("      Base URL: {}", config.llm.deepseek.base_url);
        println!("      API Key: {}", mask_api_key(config.llm.deepseek.api_key.as_deref()));

        // OpenRouter
        println!("  🔹 OpenRouter:");
        println!("      Model: {}", config.llm.openrouter.model.cyan());
        println!("      Base URL: {}", config.llm.openrouter.base_url);
        println!("      API Key: {}", mask_api_key(config.llm.openrouter.api_key.as_deref()));

        // Ollama
        println!("  🔹 Ollama:");
        println!("      URL: {}", config.llm.ollama.url);
        println!("      Model: {}", config.llm.ollama.model.cyan());

        println!("\n{}", "📝 Commit Configuration:".bold().blue());
        println!("  Format: {}", config.commit.format.to_string().cyan());
        println!("  Auto-generate: {}", if config.commit.auto_generate { "✓ yes".green() } else { "✗ no".red() });
        println!("  Allow empty: {}", if config.commit.allow_empty { "✓ yes".green() } else { "✗ no".red() });
        println!("  GPG sign: {}", if config.commit.gpg_sign { "✓ yes".green() } else { "✗ no".red() });
        println!("  Default scope: {}", config.commit.default_scope.as_deref().unwrap_or("(none)").cyan());
        println!("  Max subject length: {}", config.commit.max_subject_length);
        println!("  Require scope: {}", if config.commit.require_scope { "✓ yes".green() } else { "✗ no".red() });
        println!("  Require body: {}", if config.commit.require_body { "✓ yes".green() } else { "✗ no".red() });
        if !config.commit.body_required_types.is_empty() {
            println!("  Body required types: {}", config.commit.body_required_types.join(", ").cyan());
        }

        println!("\n{}", "🏷️ Tag Configuration:".bold().blue());
        println!("  Version prefix: '{}'", config.tag.version_prefix.cyan());
        println!("  Auto-generate: {}", if config.tag.auto_generate { "✓ yes".green() } else { "✗ no".red() });
        println!("  GPG sign: {}", if config.tag.gpg_sign { "✓ yes".green() } else { "✗ no".red() });
        println!("  Include changelog: {}", if config.tag.include_changelog { "✓ yes".green() } else { "✗ no".red() });
        println!("  Annotation template: {}", config.tag.annotation_template.as_deref().unwrap_or("(none)").cyan());

        println!("\n{}", "📋 Changelog Configuration:".bold().blue());
        println!("  Path: {}", config.changelog.path);
        println!("  Auto-generate: {}", if config.changelog.auto_generate { "✓ yes".green() } else { "✗ no".red() });
        println!("  Format: {}", format!("{:?}", config.changelog.format).cyan());
        println!("  Include hashes: {}", if config.changelog.include_hashes { "✓ yes".green() } else { "✗ no".red() });
        println!("  Include authors: {}", if config.changelog.include_authors { "✓ yes".green() } else { "✗ no".red() });
        println!("  Group by type: {}", if config.changelog.group_by_type { "✓ yes".green() } else { "✗ no".red() });
        if !config.changelog.custom_categories.is_empty() {
            println!("  Custom categories: {} category(ies)", config.changelog.custom_categories.len());
        }

        println!("\n{}", "🎨 Theme Configuration:".bold().blue());
        println!("  Colors: {}", if config.theme.colors { "✓ enabled".green() } else { "✗ disabled".red() });
        println!("  Icons: {}", if config.theme.icons { "✓ enabled".green() } else { "✗ disabled".red() });
        println!("  Date format: {}", config.theme.date_format.cyan());

        println!("\n{}", "🔒 Security:".bold().blue());
        println!("  Encrypt sensitive: {}", if config.encrypt_sensitive { "✓ yes".green() } else { "✗ no".red() });

        Ok(())
    }

    async fn edit_config(&self) -> Result<()> {
        let manager = ConfigManager::new()?;
        crate::utils::editor::edit_file(manager.path())?;
        println!("{} Configuration updated", "✓".green());
        Ok(())
    }

    async fn set_value(&self, key: &str, value: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        match key {
            "llm.provider" => manager.set_llm_provider(value.to_string()),
            "llm.max_tokens" => {
                let tokens: u32 = value.parse()?;
                manager.config_mut().llm.max_tokens = tokens;
            }
            "llm.temperature" => {
                let temp: f32 = value.parse()?;
                manager.config_mut().llm.temperature = temp;
            }
            "llm.timeout" => {
                let timeout: u64 = value.parse()?;
                manager.config_mut().llm.timeout = timeout;
            }
            "commit.format" => {
                let format = match value {
                    "conventional" => CommitFormat::Conventional,
                    "commitlint" => CommitFormat::Commitlint,
                    _ => bail!("Invalid format: {}. Use: conventional, commitlint", value),
                };
                manager.set_commit_format(format);
            }
            "commit.auto_generate" => {
                // Parse strictly so a typo like "ture" reports an error instead
                // of silently disabling auto-generation.
                let auto: bool = value.parse()?;
                manager.set_auto_generate_commits(auto);
            }
            "tag.version_prefix" => manager.set_version_prefix(value.to_string()),
            "changelog.path" => manager.set_changelog_path(value.to_string()),
            _ => bail!("Unknown configuration key: {}", key),
        }

        manager.save()?;
        println!("{} Set {} = {}", "✓".green(), key.cyan(), value);

        Ok(())
    }
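
    // Example (hypothetical shell session; the binary and `config` subcommand
    // names are assumptions, as in the sketch after the ConfigSubcommand enum):
    //
    //   $ qc config set llm.temperature 0.2
    //   ✓ Set llm.temperature = 0.2
    //
    // Unsupported keys fail with "Unknown configuration key: <key>".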

    async fn get_value(&self, key: &str) -> Result<()> {
        let manager = ConfigManager::new()?;
        let config = manager.config();

        let value = match key {
            "llm.provider" => config.llm.provider.clone(),
            "llm.max_tokens" => config.llm.max_tokens.to_string(),
            "llm.temperature" => config.llm.temperature.to_string(),
            "llm.timeout" => config.llm.timeout.to_string(),
            "commit.format" => config.commit.format.to_string(),
            "tag.version_prefix" => config.tag.version_prefix.clone(),
            "changelog.path" => config.changelog.path.clone(),
            _ => bail!("Unknown configuration key: {}", key),
        };

        println!("{}", value);
        Ok(())
    }

    async fn set_llm(&self, provider: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        let provider = if let Some(p) = provider {
            p.to_string()
        } else {
            let providers = vec!["ollama", "openai", "anthropic", "kimi", "deepseek", "openrouter"];
            let idx = Select::new()
                .with_prompt("Select LLM provider")
                .items(&providers)
                .default(0)
                .interact()?;
            providers[idx].to_string()
        };

        manager.set_llm_provider(provider.clone());

        // Configure provider-specific settings
        match provider.as_str() {
            "openai" => {
                let api_key: String = Input::new()
                    .with_prompt("OpenAI API key")
                    .interact_text()?;
                manager.set_openai_api_key(api_key);

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("gpt-4".to_string())
                    .interact_text()?;
                manager.config_mut().llm.openai.model = model;

                let base_url: String = Input::new()
                    .with_prompt("Base URL (optional)")
                    .default("https://api.openai.com/v1".to_string())
                    .interact_text()?;
                if base_url != "https://api.openai.com/v1" {
                    manager.config_mut().llm.openai.base_url = base_url;
                }
            }
            "anthropic" => {
                let api_key: String = Input::new()
                    .with_prompt("Anthropic API key")
                    .interact_text()?;
                manager.set_anthropic_api_key(api_key);

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("claude-3-sonnet-20240229".to_string())
                    .interact_text()?;
                manager.config_mut().llm.anthropic.model = model;
            }
            "kimi" => {
                let api_key: String = Input::new()
                    .with_prompt("Kimi API key")
                    .interact_text()?;
                manager.set_kimi_api_key(api_key);

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("moonshot-v1-8k".to_string())
                    .interact_text()?;
                manager.config_mut().llm.kimi.model = model;

                let base_url: String = Input::new()
                    .with_prompt("Base URL (optional)")
                    .default("https://api.moonshot.cn/v1".to_string())
                    .interact_text()?;
                if base_url != "https://api.moonshot.cn/v1" {
                    manager.set_kimi_base_url(base_url);
                }
            }
            "deepseek" => {
                let api_key: String = Input::new()
                    .with_prompt("DeepSeek API key")
                    .interact_text()?;
                manager.set_deepseek_api_key(api_key);

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("deepseek-chat".to_string())
                    .interact_text()?;
                manager.config_mut().llm.deepseek.model = model;

                let base_url: String = Input::new()
                    .with_prompt("Base URL (optional)")
                    .default("https://api.deepseek.com/v1".to_string())
                    .interact_text()?;
                if base_url != "https://api.deepseek.com/v1" {
                    manager.set_deepseek_base_url(base_url);
                }
            }
            "openrouter" => {
                let api_key: String = Input::new()
                    .with_prompt("OpenRouter API key")
                    .interact_text()?;
                manager.set_openrouter_api_key(api_key);

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("openai/gpt-3.5-turbo".to_string())
                    .interact_text()?;
                manager.config_mut().llm.openrouter.model = model;

                let base_url: String = Input::new()
                    .with_prompt("Base URL (optional)")
                    .default("https://openrouter.ai/api/v1".to_string())
                    .interact_text()?;
                if base_url != "https://openrouter.ai/api/v1" {
                    manager.set_openrouter_base_url(base_url);
                }
            }
            "ollama" => {
                let url: String = Input::new()
                    .with_prompt("Ollama URL")
                    .default("http://localhost:11434".to_string())
                    .interact_text()?;
                manager.config_mut().llm.ollama.url = url;

                let model: String = Input::new()
                    .with_prompt("Model")
                    .default("llama2".to_string())
                    .interact_text()?;
                manager.config_mut().llm.ollama.model = model;
            }
            _ => {}
        }

        manager.save()?;
        println!("{} Set LLM provider to {}", "✓".green(), provider.cyan());

        Ok(())
    }

    async fn set_openai_key(&self, key: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_openai_api_key(key.to_string());
        manager.save()?;
        println!("{} OpenAI API key set", "✓".green());
        Ok(())
    }

    async fn set_anthropic_key(&self, key: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_anthropic_api_key(key.to_string());
        manager.save()?;
        println!("{} Anthropic API key set", "✓".green());
        Ok(())
    }

    async fn set_kimi_key(&self, key: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_kimi_api_key(key.to_string());
        manager.save()?;
        println!("{} Kimi API key set", "✓".green());
        Ok(())
    }

    async fn set_deepseek_key(&self, key: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_deepseek_api_key(key.to_string());
        manager.save()?;
        println!("{} DeepSeek API key set", "✓".green());
        Ok(())
    }

    async fn set_openrouter_key(&self, key: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_openrouter_api_key(key.to_string());
        manager.save()?;
        println!("{} OpenRouter API key set", "✓".green());
        Ok(())
    }

    async fn set_kimi(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        if let Some(url) = base_url {
            manager.set_kimi_base_url(url.to_string());
        }
        if let Some(m) = model {
            manager.config_mut().llm.kimi.model = m.to_string();
        }

        manager.save()?;
        println!("{} Kimi configuration updated", "✓".green());
        Ok(())
    }

    async fn set_deepseek(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        if let Some(url) = base_url {
            manager.set_deepseek_base_url(url.to_string());
        }
        if let Some(m) = model {
            manager.config_mut().llm.deepseek.model = m.to_string();
        }

        manager.save()?;
        println!("{} DeepSeek configuration updated", "✓".green());
        Ok(())
    }

    async fn set_openrouter(&self, base_url: Option<&str>, model: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        if let Some(url) = base_url {
            manager.set_openrouter_base_url(url.to_string());
        }
        if let Some(m) = model {
            manager.config_mut().llm.openrouter.model = m.to_string();
        }

        manager.save()?;
        println!("{} OpenRouter configuration updated", "✓".green());
        Ok(())
    }

    async fn set_ollama(&self, url: Option<&str>, model: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        if let Some(u) = url {
            manager.config_mut().llm.ollama.url = u.to_string();
        }
        if let Some(m) = model {
            manager.config_mut().llm.ollama.model = m.to_string();
        }

        manager.save()?;
        println!("{} Ollama configuration updated", "✓".green());
        Ok(())
    }

    async fn set_commit_format(&self, format: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        let format = match format {
            "conventional" => CommitFormat::Conventional,
            "commitlint" => CommitFormat::Commitlint,
            _ => bail!("Invalid format: {}. Use: conventional, commitlint", format),
        };

        manager.set_commit_format(format);
        manager.save()?;
        println!("{} Set commit format to {}", "✓".green(), format.to_string().cyan());
        Ok(())
    }

    async fn set_version_prefix(&self, prefix: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_version_prefix(prefix.to_string());
        manager.save()?;
        println!("{} Set version prefix to '{}'", "✓".green(), prefix);
        Ok(())
    }

    async fn set_changelog_path(&self, path: &str) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_changelog_path(path.to_string());
        manager.save()?;
        println!("{} Set changelog path to {}", "✓".green(), path);
        Ok(())
    }

    async fn set_language(&self, language: Option<&str>) -> Result<()> {
        let mut manager = ConfigManager::new()?;

        let language_code = if let Some(lang) = language {
            lang.to_string()
        } else {
            let languages = vec![
                Language::English,
                Language::Chinese,
                Language::Japanese,
                Language::Korean,
                Language::Spanish,
                Language::French,
                Language::German,
            ];
            let language_names: Vec<String> = languages.iter().map(|l| l.display_name().to_string()).collect();
            let idx = Select::new()
                .with_prompt("Select language")
                .items(&language_names)
                .default(0)
                .interact()?;
            languages[idx].to_code().to_string()
        };

        manager.set_output_language(language_code.clone());
        manager.save()?;
        println!("{} Set output language to {}", "✓".green(), language_code.cyan());
        Ok(())
    }

    async fn set_keep_types_english(&self, keep: bool) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_keep_types_english(keep);
        manager.save()?;
        let status = if keep { "enabled" } else { "disabled" };
        println!("{} Keep commit types in English: {}", "✓".green(), status);
        Ok(())
    }

    async fn set_keep_changelog_types_english(&self, keep: bool) -> Result<()> {
        let mut manager = ConfigManager::new()?;
        manager.set_keep_changelog_types_english(keep);
        manager.save()?;
        let status = if keep { "enabled" } else { "disabled" };
        println!("{} Keep changelog types in English: {}", "✓".green(), status);
        Ok(())
    }

    async fn reset(&self, force: bool) -> Result<()> {
        if !force {
            let confirm = Confirm::new()
                .with_prompt("Are you sure you want to reset all configuration?")
                .default(false)
                .interact()?;

            if !confirm {
                println!("{}", "Cancelled.".yellow());
                return Ok(());
            }
        }

        let mut manager = ConfigManager::new()?;
        manager.reset();
        manager.save()?;

        println!("{} Configuration reset to defaults", "✓".green());
        Ok(())
    }

    async fn export_config(&self, output: Option<&str>) -> Result<()> {
        let manager = ConfigManager::new()?;
        let toml = manager.export()?;

        if let Some(path) = output {
            std::fs::write(path, toml)?;
            println!("{} Configuration exported to {}", "✓".green(), path);
        } else {
            println!("{}", toml);
        }

        Ok(())
    }

    async fn import_config(&self, file: &str) -> Result<()> {
        let toml = std::fs::read_to_string(file)?;

        let mut manager = ConfigManager::new()?;
        manager.import(&toml)?;
        manager.save()?;

        println!("{} Configuration imported from {}", "✓".green(), file);
        Ok(())
    }

    async fn list_models(&self) -> Result<()> {
        let manager = ConfigManager::new()?;
        let config = manager.config();

        match config.llm.provider.as_str() {
            "ollama" => {
                let client = crate::llm::OllamaClient::new(
                    &config.llm.ollama.url,
                    &config.llm.ollama.model,
                );

                println!("Fetching available models from Ollama...");
                match client.list_models().await {
                    Ok(models) => {
                        println!("\n{}", "Available models:".bold());
                        for model in models {
                            let marker = if model == config.llm.ollama.model { "●".green() } else { "○".dimmed() };
                            println!("{} {}", marker, model);
                        }
                    }
                    Err(e) => {
                        println!("{} Failed to fetch models: {}", "✗".red(), e);
                    }
                }
            }
            "openai" => {
                if let Some(ref key) = config.llm.openai.api_key {
                    let client = crate::llm::OpenAiClient::new(
                        &config.llm.openai.base_url,
                        key,
                        &config.llm.openai.model,
                    )?;

                    println!("Fetching available models from OpenAI...");
                    match client.list_models().await {
                        Ok(models) => {
                            println!("\n{}", "Available models:".bold());
                            for model in models {
                                let marker = if model == config.llm.openai.model { "●".green() } else { "○".dimmed() };
                                println!("{} {}", marker, model);
                            }
                        }
                        Err(e) => {
                            println!("{} Failed to fetch models: {}", "✗".red(), e);
                        }
                    }
                } else {
                    bail!("OpenAI API key not configured");
                }
            }
            "anthropic" => {
                if let Some(ref key) = config.llm.anthropic.api_key {
                    let client = crate::llm::AnthropicClient::new(
                        key,
                        &config.llm.anthropic.model,
                    )?;

                    println!("Fetching available models from Anthropic...");
                    match client.list_models().await {
                        Ok(models) => {
                            println!("\n{}", "Available models:".bold());
                            for model in models {
                                let marker = if model == config.llm.anthropic.model { "●".green() } else { "○".dimmed() };
                                println!("{} {}", marker, model);
                            }
                        }
                        Err(e) => {
                            println!("{} Failed to fetch models: {}", "✗".red(), e);
                        }
                    }
                } else {
                    bail!("Anthropic API key not configured");
                }
            }
            "kimi" => {
                if let Some(ref key) = config.llm.kimi.api_key {
                    let client = crate::llm::KimiClient::with_base_url(
                        key,
                        &config.llm.kimi.model,
                        &config.llm.kimi.base_url,
                    )?;

                    println!("Fetching available models from Kimi...");
                    match client.list_models().await {
                        Ok(models) => {
                            println!("\n{}", "Available models:".bold());
                            for model in models {
                                let marker = if model == config.llm.kimi.model { "●".green() } else { "○".dimmed() };
                                println!("{} {}", marker, model);
                            }
                        }
                        Err(e) => {
                            println!("{} Failed to fetch models: {}", "✗".red(), e);
                        }
                    }
                } else {
                    bail!("Kimi API key not configured");
                }
            }
            "deepseek" => {
                if let Some(ref key) = config.llm.deepseek.api_key {
                    let client = crate::llm::DeepSeekClient::with_base_url(
                        key,
                        &config.llm.deepseek.model,
                        &config.llm.deepseek.base_url,
                    )?;

                    println!("Fetching available models from DeepSeek...");
                    match client.list_models().await {
                        Ok(models) => {
                            println!("\n{}", "Available models:".bold());
                            for model in models {
                                let marker = if model == config.llm.deepseek.model { "●".green() } else { "○".dimmed() };
                                println!("{} {}", marker, model);
                            }
                        }
                        Err(e) => {
                            println!("{} Failed to fetch models: {}", "✗".red(), e);
                        }
                    }
                } else {
                    bail!("DeepSeek API key not configured");
                }
            }
            "openrouter" => {
                if let Some(ref key) = config.llm.openrouter.api_key {
                    let client = crate::llm::OpenRouterClient::with_base_url(
                        key,
                        &config.llm.openrouter.model,
                        &config.llm.openrouter.base_url,
                    )?;

                    println!("Fetching available models from OpenRouter...");
                    match client.list_models().await {
                        Ok(models) => {
                            println!("\n{}", "Available models:".bold());
                            for model in models {
                                let marker = if model == config.llm.openrouter.model { "●".green() } else { "○".dimmed() };
                                println!("{} {}", marker, model);
                            }
                        }
                        Err(e) => {
                            println!("{} Failed to fetch models: {}", "✗".red(), e);
                        }
                    }
                } else {
                    bail!("OpenRouter API key not configured");
                }
            }
            provider => {
                println!("Listing models not supported for provider: {}", provider);
            }
        }

        Ok(())
    }

    async fn test_llm(&self) -> Result<()> {
        let manager = ConfigManager::new()?;
        let config = manager.config();

        println!("Testing LLM connection ({})...", config.llm.provider.cyan());

        match crate::llm::LlmClient::from_config(&config.llm).await {
            Ok(client) => {
                if client.is_available().await {
                    println!("{} LLM connection successful!", "✓".green());

                    // Test generation with a sample diff
                    println!("Testing generation...");
                    let sample_diff = r#"diff --git a/src/main.rs b/src/main.rs
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,3 +1,4 @@
 fn main() {
+    println!("Hello, World!");
 }"#;
                    match client
                        .generate_commit_message(
                            sample_diff,
                            crate::config::CommitFormat::Conventional,
                            crate::config::Language::English,
                        )
                        .await
                    {
                        Ok(response) => {
                            println!("{} Generation test passed", "✓".green());
                            println!("Response: {}", response.description.dimmed());
                        }
                        Err(e) => {
                            println!("{} Generation test failed: {}", "✗".red(), e);
                        }
                    }
                } else {
                    println!("{} LLM provider is not available", "✗".red());
                }
            }
            Err(e) => {
                println!("{} Failed to initialize LLM: {}", "✗".red(), e);
            }
        }

        Ok(())
    }
}