//! QuiCommit/src/commands/config.rs — the `config` CLI command.
//! (Original paste header: Files · QuiCommit/src/commands/config.rs · 519 lines · 19 KiB · Rust)

use anyhow::{bail, Context, Result};
use clap::{Parser, Subcommand};
use colored::Colorize;
use dialoguer::{Confirm, Input, Select};
use crate::config::manager::ConfigManager;
use crate::config::{CommitFormat, LlmConfig};
/// Manage configuration settings
#[derive(Parser)]
pub struct ConfigCommand {
    // Optional subcommand; when omitted, `execute` falls back to showing the
    // current configuration (see the `None` arm of `execute`).
    #[command(subcommand)]
    command: Option<ConfigSubcommand>,
}
// Subcommands of `config`. NOTE: the `///` doc comments below double as the
// clap-generated help text, so they are user-visible output — only `//`
// comments are used for internal notes.
#[derive(Subcommand)]
enum ConfigSubcommand {
    /// Show current configuration
    Show,
    /// Edit configuration file
    Edit,
    /// Set configuration value
    Set {
        /// Key (e.g., llm.provider, commit.format)
        key: String,
        /// Value
        value: String,
    },
    /// Get configuration value
    Get {
        /// Key
        key: String,
    },
    /// Set LLM provider
    SetLlm {
        /// Provider (ollama, openai, anthropic)
        // Optional on purpose: when omitted, `set_llm` prompts interactively.
        #[arg(value_name = "PROVIDER")]
        provider: Option<String>,
    },
    /// Set OpenAI API key
    SetOpenAiKey {
        /// API key
        key: String,
    },
    /// Set Anthropic API key
    SetAnthropicKey {
        /// API key
        key: String,
    },
    /// Configure Ollama settings
    SetOllama {
        /// Ollama server URL
        #[arg(short, long)]
        url: Option<String>,
        /// Model name
        #[arg(short, long)]
        model: Option<String>,
    },
    /// Set commit format
    SetCommitFormat {
        /// Format (conventional, commitlint)
        format: String,
    },
    /// Set version prefix for tags
    SetVersionPrefix {
        /// Prefix (e.g., 'v')
        prefix: String,
    },
    /// Set changelog path
    SetChangelogPath {
        /// Path
        path: String,
    },
    /// Reset configuration to defaults
    Reset {
        /// Skip confirmation
        #[arg(short, long)]
        force: bool,
    },
    /// Export configuration
    Export {
        /// Output file (defaults to stdout)
        #[arg(short, long)]
        output: Option<String>,
    },
    /// Import configuration
    Import {
        /// Input file
        #[arg(short, long)]
        file: String,
    },
    /// List available LLM models
    ListModels,
    /// Test LLM connection
    TestLlm,
}
impl ConfigCommand {
pub async fn execute(&self) -> Result<()> {
match &self.command {
Some(ConfigSubcommand::Show) => self.show_config().await,
Some(ConfigSubcommand::Edit) => self.edit_config().await,
Some(ConfigSubcommand::Set { key, value }) => self.set_value(key, value).await,
Some(ConfigSubcommand::Get { key }) => self.get_value(key).await,
Some(ConfigSubcommand::SetLlm { provider }) => self.set_llm(provider.as_deref()).await,
Some(ConfigSubcommand::SetOpenAiKey { key }) => self.set_openai_key(key).await,
Some(ConfigSubcommand::SetAnthropicKey { key }) => self.set_anthropic_key(key).await,
Some(ConfigSubcommand::SetOllama { url, model }) => self.set_ollama(url.as_deref(), model.as_deref()).await,
Some(ConfigSubcommand::SetCommitFormat { format }) => self.set_commit_format(format).await,
Some(ConfigSubcommand::SetVersionPrefix { prefix }) => self.set_version_prefix(prefix).await,
Some(ConfigSubcommand::SetChangelogPath { path }) => self.set_changelog_path(path).await,
Some(ConfigSubcommand::Reset { force }) => self.reset(*force).await,
Some(ConfigSubcommand::Export { output }) => self.export_config(output.as_deref()).await,
Some(ConfigSubcommand::Import { file }) => self.import_config(file).await,
Some(ConfigSubcommand::ListModels) => self.list_models().await,
Some(ConfigSubcommand::TestLlm) => self.test_llm().await,
None => self.show_config().await,
}
}
async fn show_config(&self) -> Result<()> {
let manager = ConfigManager::new()?;
let config = manager.config();
println!("{}", "\nQuicCommit Configuration".bold());
println!("{}", "".repeat(60));
println!("\n{}", "General:".bold());
println!(" Config file: {}", manager.path().display());
println!(" Default profile: {}",
config.default_profile.as_deref().unwrap_or("(none)").cyan());
println!(" Profiles: {}", config.profiles.len());
println!("\n{}", "LLM Configuration:".bold());
println!(" Provider: {}", config.llm.provider.cyan());
println!(" Max tokens: {}", config.llm.max_tokens);
println!(" Temperature: {}", config.llm.temperature);
println!(" Timeout: {}s", config.llm.timeout);
match config.llm.provider.as_str() {
"ollama" => {
println!(" URL: {}", config.llm.ollama.url);
println!(" Model: {}", config.llm.ollama.model.cyan());
}
"openai" => {
println!(" Model: {}", config.llm.openai.model.cyan());
println!(" API key: {}",
if config.llm.openai.api_key.is_some() { "✓ set".green() } else { "✗ not set".red() });
}
"anthropic" => {
println!(" Model: {}", config.llm.anthropic.model.cyan());
println!(" API key: {}",
if config.llm.anthropic.api_key.is_some() { "✓ set".green() } else { "✗ not set".red() });
}
_ => {}
}
println!("\n{}", "Commit Configuration:".bold());
println!(" Format: {}", config.commit.format.to_string().cyan());
println!(" Auto-generate: {}", if config.commit.auto_generate { "yes".green() } else { "no".red() });
println!(" GPG sign: {}", if config.commit.gpg_sign { "yes".green() } else { "no".red() });
println!(" Max subject length: {}", config.commit.max_subject_length);
println!("\n{}", "Tag Configuration:".bold());
println!(" Version prefix: '{}'", config.tag.version_prefix);
println!(" Auto-generate: {}", if config.tag.auto_generate { "yes".green() } else { "no".red() });
println!(" GPG sign: {}", if config.tag.gpg_sign { "yes".green() } else { "no".red() });
println!(" Include changelog: {}", if config.tag.include_changelog { "yes".green() } else { "no".red() });
println!("\n{}", "Changelog Configuration:".bold());
println!(" Path: {}", config.changelog.path);
println!(" Auto-generate: {}", if config.changelog.auto_generate { "yes".green() } else { "no".red() });
println!(" Include hashes: {}", if config.changelog.include_hashes { "yes".green() } else { "no".red() });
println!(" Include authors: {}", if config.changelog.include_authors { "yes".green() } else { "no".red() });
println!(" Group by type: {}", if config.changelog.group_by_type { "yes".green() } else { "no".red() });
Ok(())
}
async fn edit_config(&self) -> Result<()> {
let manager = ConfigManager::new()?;
crate::utils::editor::edit_file(manager.path())?;
println!("{} Configuration updated", "".green());
Ok(())
}
async fn set_value(&self, key: &str, value: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
match key {
"llm.provider" => manager.set_llm_provider(value.to_string()),
"llm.max_tokens" => {
let tokens: u32 = value.parse()?;
manager.config_mut().llm.max_tokens = tokens;
}
"llm.temperature" => {
let temp: f32 = value.parse()?;
manager.config_mut().llm.temperature = temp;
}
"llm.timeout" => {
let timeout: u64 = value.parse()?;
manager.config_mut().llm.timeout = timeout;
}
"commit.format" => {
let format = match value {
"conventional" => CommitFormat::Conventional,
"commitlint" => CommitFormat::Commitlint,
_ => bail!("Invalid format: {}. Use: conventional, commitlint", value),
};
manager.set_commit_format(format);
}
"commit.auto_generate" => {
manager.set_auto_generate_commits(value == "true");
}
"tag.version_prefix" => manager.set_version_prefix(value.to_string()),
"changelog.path" => manager.set_changelog_path(value.to_string()),
_ => bail!("Unknown configuration key: {}", key),
}
manager.save()?;
println!("{} Set {} = {}", "".green(), key.cyan(), value);
Ok(())
}
async fn get_value(&self, key: &str) -> Result<()> {
let manager = ConfigManager::new()?;
let config = manager.config();
let value = match key {
"llm.provider" => &config.llm.provider,
"llm.max_tokens" => return Ok(println!("{}", config.llm.max_tokens)),
"llm.temperature" => return Ok(println!("{}", config.llm.temperature)),
"llm.timeout" => return Ok(println!("{}", config.llm.timeout)),
"commit.format" => return Ok(println!("{}", config.commit.format)),
"tag.version_prefix" => &config.tag.version_prefix,
"changelog.path" => &config.changelog.path,
_ => bail!("Unknown configuration key: {}", key),
};
println!("{}", value);
Ok(())
}
async fn set_llm(&self, provider: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;
let provider = if let Some(p) = provider {
p.to_string()
} else {
let providers = vec!["ollama", "openai", "anthropic"];
let idx = Select::new()
.with_prompt("Select LLM provider")
.items(&providers)
.default(0)
.interact()?;
providers[idx].to_string()
};
manager.set_llm_provider(provider.clone());
// Configure provider-specific settings
match provider.as_str() {
"openai" => {
let api_key: String = Input::new()
.with_prompt("OpenAI API key")
.interact_text()?;
manager.set_openai_api_key(api_key);
let model: String = Input::new()
.with_prompt("Model")
.default("gpt-4".to_string())
.interact_text()?;
manager.config_mut().llm.openai.model = model;
}
"anthropic" => {
let api_key: String = Input::new()
.with_prompt("Anthropic API key")
.interact_text()?;
manager.set_anthropic_api_key(api_key);
}
"ollama" => {
let url: String = Input::new()
.with_prompt("Ollama URL")
.default("http://localhost:11434".to_string())
.interact_text()?;
manager.config_mut().llm.ollama.url = url;
let model: String = Input::new()
.with_prompt("Model")
.default("llama2".to_string())
.interact_text()?;
manager.config_mut().llm.ollama.model = model;
}
_ => {}
}
manager.save()?;
println!("{} Set LLM provider to {}", "".green(), provider.cyan());
Ok(())
}
async fn set_openai_key(&self, key: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_openai_api_key(key.to_string());
manager.save()?;
println!("{} OpenAI API key set", "".green());
Ok(())
}
async fn set_anthropic_key(&self, key: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_anthropic_api_key(key.to_string());
manager.save()?;
println!("{} Anthropic API key set", "".green());
Ok(())
}
async fn set_ollama(&self, url: Option<&str>, model: Option<&str>) -> Result<()> {
let mut manager = ConfigManager::new()?;
if let Some(u) = url {
manager.config_mut().llm.ollama.url = u.to_string();
}
if let Some(m) = model {
manager.config_mut().llm.ollama.model = m.to_string();
}
manager.save()?;
println!("{} Ollama configuration updated", "".green());
Ok(())
}
async fn set_commit_format(&self, format: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
let format = match format {
"conventional" => CommitFormat::Conventional,
"commitlint" => CommitFormat::Commitlint,
_ => bail!("Invalid format: {}. Use: conventional, commitlint", format),
};
manager.set_commit_format(format);
manager.save()?;
println!("{} Set commit format to {}", "".green(), format.to_string().cyan());
Ok(())
}
async fn set_version_prefix(&self, prefix: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_version_prefix(prefix.to_string());
manager.save()?;
println!("{} Set version prefix to '{}'", "".green(), prefix);
Ok(())
}
async fn set_changelog_path(&self, path: &str) -> Result<()> {
let mut manager = ConfigManager::new()?;
manager.set_changelog_path(path.to_string());
manager.save()?;
println!("{} Set changelog path to {}", "".green(), path);
Ok(())
}
async fn reset(&self, force: bool) -> Result<()> {
if !force {
let confirm = Confirm::new()
.with_prompt("Are you sure you want to reset all configuration?")
.default(false)
.interact()?;
if !confirm {
println!("{}", "Cancelled.".yellow());
return Ok(());
}
}
let mut manager = ConfigManager::new()?;
manager.reset();
manager.save()?;
println!("{} Configuration reset to defaults", "".green());
Ok(())
}
async fn export_config(&self, output: Option<&str>) -> Result<()> {
let manager = ConfigManager::new()?;
let toml = manager.export()?;
if let Some(path) = output {
std::fs::write(path, toml)?;
println!("{} Configuration exported to {}", "".green(), path);
} else {
println!("{}", toml);
}
Ok(())
}
async fn import_config(&self, file: &str) -> Result<()> {
let toml = std::fs::read_to_string(file)?;
let mut manager = ConfigManager::new()?;
manager.import(&toml)?;
manager.save()?;
println!("{} Configuration imported from {}", "".green(), file);
Ok(())
}
async fn list_models(&self) -> Result<()> {
let manager = ConfigManager::new()?;
let config = manager.config();
match config.llm.provider.as_str() {
"ollama" => {
let client = crate::llm::OllamaClient::new(
&config.llm.ollama.url,
&config.llm.ollama.model,
);
println!("Fetching available models from Ollama...");
match client.list_models().await {
Ok(models) => {
println!("\n{}", "Available models:".bold());
for model in models {
let marker = if model == config.llm.ollama.model { "".green() } else { "".dimmed() };
println!("{} {}", marker, model);
}
}
Err(e) => {
println!("{} Failed to fetch models: {}", "".red(), e);
}
}
}
"openai" => {
if let Some(ref key) = config.llm.openai.api_key {
let client = crate::llm::OpenAiClient::new(
&config.llm.openai.base_url,
key,
&config.llm.openai.model,
)?;
println!("Fetching available models from OpenAI...");
match client.list_models().await {
Ok(models) => {
println!("\n{}", "Available models:".bold());
for model in models {
let marker = if model == config.llm.openai.model { "".green() } else { "".dimmed() };
println!("{} {}", marker, model);
}
}
Err(e) => {
println!("{} Failed to fetch models: {}", "".red(), e);
}
}
} else {
bail!("OpenAI API key not configured");
}
}
provider => {
println!("Listing models not supported for provider: {}", provider);
}
}
Ok(())
}
async fn test_llm(&self) -> Result<()> {
let manager = ConfigManager::new()?;
let config = manager.config();
println!("Testing LLM connection ({})...", config.llm.provider.cyan());
match crate::llm::LlmClient::from_config(&config.llm).await {
Ok(client) => {
if client.is_available().await {
println!("{} LLM connection successful!", "".green());
// Test generation
println!("Testing generation...");
match client.generate_commit_message("test", crate::config::CommitFormat::Conventional).await {
Ok(response) => {
println!("{} Generation test passed", "".green());
println!("Response: {}", response.description.dimmed());
}
Err(e) => {
println!("{} Generation test failed: {}", "".red(), e);
}
}
} else {
println!("{} LLM provider is not available", "".red());
}
}
Err(e) => {
println!("{} Failed to initialize LLM: {}", "".red(), e);
}
}
Ok(())
}
}