From 56383150319b4552199f1f99ae5a57a4fa8454cc Mon Sep 17 00:00:00 2001
From: SidneyZhang
Date: Mon, 2 Feb 2026 06:40:41 +0000
Subject: [PATCH] feat(config): add list_models support for anthropic, kimi, and deepseek
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/commands/config.rs | 107 +++++++++++++++++++++++++++++++++++++++--
 src/git/mod.rs         |  54 +++++++++++++++++++--
 src/llm/anthropic.rs   |  12 +++--
 src/llm/deepseek.rs    |  50 ++++++++++++++-----
 src/llm/kimi.rs        |  50 ++++++++++++++-----
 src/llm/openrouter.rs  |  50 ++++++++++++++-----
 6 files changed, 278 insertions(+), 45 deletions(-)

diff --git a/src/commands/config.rs b/src/commands/config.rs
index 70c5cc1..dda6071 100644
--- a/src/commands/config.rs
+++ b/src/commands/config.rs
@@ -830,14 +830,14 @@ impl ConfigCommand {
     async fn list_models(&self) -> Result<()> {
         let manager = ConfigManager::new()?;
         let config = manager.config();
-        
+
         match config.llm.provider.as_str() {
             "ollama" => {
                 let client = crate::llm::OllamaClient::new(
                     &config.llm.ollama.url,
                     &config.llm.ollama.model,
                 );
-                
+
                 println!("Fetching available models from Ollama...");
                 match client.list_models().await {
                     Ok(models) => {
@@ -859,7 +859,7 @@
                         key,
                         &config.llm.openai.model,
                     )?;
-                    
+
                     println!("Fetching available models from OpenAI...");
                     match client.list_models().await {
                         Ok(models) => {
@@ -877,11 +877,110 @@
                     bail!("OpenAI API key not configured");
                 }
             }
+            "anthropic" => {
+                if let Some(ref key) = config.llm.anthropic.api_key {
+                    let client = crate::llm::AnthropicClient::new(
+                        key,
+                        &config.llm.anthropic.model,
+                    )?;
+
+                    println!("Fetching available models from Anthropic...");
+                    match client.list_models().await {
+                        Ok(models) => {
+                            println!("\n{}", "Available models:".bold());
+                            for model in models {
+                                let marker = if model == config.llm.anthropic.model { "●".green() } else { "○".dimmed() };
+                                println!("{} {}", marker, model);
+                            }
+                        }
+                        Err(e) => {
+                            println!("{} Failed to fetch models: {}", "✗".red(), e);
+                        }
+                    }
+                } else {
+                    bail!("Anthropic API key not configured");
+                }
+            }
+            "kimi" => {
+                if let Some(ref key) = config.llm.kimi.api_key {
+                    let client = crate::llm::KimiClient::with_base_url(
+                        key,
+                        &config.llm.kimi.model,
+                        &config.llm.kimi.base_url,
+                    )?;
+
+                    println!("Fetching available models from Kimi...");
+                    match client.list_models().await {
+                        Ok(models) => {
+                            println!("\n{}", "Available models:".bold());
+                            for model in models {
+                                let marker = if model == config.llm.kimi.model { "●".green() } else { "○".dimmed() };
+                                println!("{} {}", marker, model);
+                            }
+                        }
+                        Err(e) => {
+                            println!("{} Failed to fetch models: {}", "✗".red(), e);
+                        }
+                    }
+                } else {
+                    bail!("Kimi API key not configured");
+                }
+            }
+            "deepseek" => {
+                if let Some(ref key) = config.llm.deepseek.api_key {
+                    let client = crate::llm::DeepSeekClient::with_base_url(
+                        key,
+                        &config.llm.deepseek.model,
+                        &config.llm.deepseek.base_url,
+                    )?;
+
+                    println!("Fetching available models from DeepSeek...");
+                    match client.list_models().await {
+                        Ok(models) => {
+                            println!("\n{}", "Available models:".bold());
+                            for model in models {
+                                let marker = if model == config.llm.deepseek.model { "●".green() } else { "○".dimmed() };
+                                println!("{} {}", marker, model);
+                            }
+                        }
+                        Err(e) => {
+                            println!("{} Failed to fetch models: {}", "✗".red(), e);
+                        }
+                    }
+                } else {
+                    bail!("DeepSeek API key not configured");
+                }
+            }
+            "openrouter" => {
+                if let Some(ref key) = config.llm.openrouter.api_key {
+                    let client = crate::llm::OpenRouterClient::with_base_url(
+                        key,
+                        &config.llm.openrouter.model,
+                        &config.llm.openrouter.base_url,
+                    )?;
+
+                    println!("Fetching available models from OpenRouter...");
+                    match client.list_models().await {
+                        Ok(models) => {
+                            println!("\n{}", "Available models:".bold());
+                            for model in models {
+                                let marker = if model == config.llm.openrouter.model { "●".green() } else { "○".dimmed() };
+                                println!("{} {}", marker, model);
+                            }
+                        }
+                        Err(e) => {
+                            println!("{} Failed to fetch models: {}", "✗".red(), e);
+                        }
+                    }
+                } else {
+                    bail!("OpenRouter API key not configured");
+                }
+            }
             provider => {
                 println!("Listing models not supported for provider: {}", provider);
             }
         }
-        
+
         Ok(())
     }

diff --git a/src/git/mod.rs b/src/git/mod.rs
index d409740..48ffd18 100644
--- a/src/git/mod.rs
+++ b/src/git/mod.rs
@@ -22,15 +22,41 @@ impl GitRepo {
     /// Open a git repository
     pub fn open<P: AsRef<Path>>(path: P) -> Result<Self> {
         let path = path.as_ref();
-        let absolute_path = path.canonicalize().unwrap_or_else(|_| path.to_path_buf());
+
+        // Enhanced cross-platform path handling
+        let absolute_path = if let Ok(canonical) = path.canonicalize() {
+            canonical
+        } else {
+            // Fallback: convert to absolute path without canonicalization
+            if path.is_absolute() {
+                path.to_path_buf()
+            } else {
+                std::env::current_dir()?.join(path)
+            }
+        };
+
+        // Try multiple git repository discovery strategies for cross-platform compatibility
         let repo = Repository::discover(&absolute_path)
-            .or_else(|_| Repository::open(&absolute_path))
+            .or_else(|discover_err| {
+                // Try direct open as fallback
+                Repository::open(&absolute_path).map_err(|open_err| {
+                    // Provide detailed error information for debugging
+                    anyhow::anyhow!(
+                        "Git repository discovery failed:\n\
+                         Discovery error: {}\n\
+                         Direct open error: {}\n\
+                         Path attempted: {:?}\n\
+                         Current directory: {:?}",
+                        discover_err, open_err, absolute_path, std::env::current_dir()
+                    )
+                })
+            })
             .with_context(|| {
                 format!(
                     "Failed to open git repository at '{:?}'. Please ensure:\n\
-                     1. The directory is set as safe (run: git config --global --add safe.directory \"{}\")\n\
-                     2. The path is correct and contains a valid '.git' folder.",
+                     1. The directory contains a valid '.git' folder\n\
+                     2. The directory is set as safe (run: git config --global --add safe.directory \"{}\")\n\
+                     3. You have proper permissions to access the repository",
                     absolute_path,
                     absolute_path.display()
                 )
@@ -689,19 +715,37 @@ impl StatusSummary {
 pub fn find_repo<P: AsRef<Path>>(start_path: P) -> Result<GitRepo> {
     let start_path = start_path.as_ref();
 
+    // Try the starting path first
     if let Ok(repo) = GitRepo::open(start_path) {
         return Ok(repo);
     }
 
+    // Walk up the directory tree to find a git repository
     let mut current = start_path;
+    let mut attempted_paths = vec![current.to_string_lossy().to_string()];
+
     while let Some(parent) = current.parent() {
+        attempted_paths.push(parent.to_string_lossy().to_string());
+
         if let Ok(repo) = GitRepo::open(parent) {
             return Ok(repo);
         }
         current = parent;
     }
 
-    bail!("No git repository found starting from {:?}", start_path)
+    // Provide detailed error information for debugging
+    bail!(
+        "No git repository found starting from {:?}.\n\
+         Paths attempted:\n {}\n\
+         Current directory: {:?}\n\
+         Please ensure:\n\
+         1. You are in a git repository or its subdirectory\n\
+         2. The repository has a valid .git folder\n\
+         3. You have proper permissions to access the repository",
+        start_path,
+        attempted_paths.join("\n "),
+        std::env::current_dir()
+    )
 }
 
 /// Check if path is inside a git repository
diff --git a/src/llm/anthropic.rs b/src/llm/anthropic.rs
index 9e6a511..8627038 100644
--- a/src/llm/anthropic.rs
+++ b/src/llm/anthropic.rs
@@ -70,10 +70,16 @@ impl AnthropicClient {
         Ok(self)
     }
 
+    /// List available models
+    pub async fn list_models(&self) -> Result<Vec<String>> {
+        // Anthropic doesn't have a models API endpoint, return predefined list
+        Ok(ANTHROPIC_MODELS.iter().map(|&m| m.to_string()).collect())
+    }
+
     /// Validate API key
     pub async fn validate_key(&self) -> Result<bool> {
         let url = "https://api.anthropic.com/v1/messages";
-        
+
         let request = MessagesRequest {
             model: self.model.clone(),
             max_tokens: 5,
@@ -84,7 +90,7 @@
             }],
             system: None,
         };
-        
+
         let response = self.client
             .post(url)
             .header("x-api-key", &self.api_key)
@@ -93,7 +99,7 @@
             .json(&request)
             .send()
             .await;
-        
+
         match response {
             Ok(resp) => {
                 if resp.status().is_success() {
diff --git a/src/llm/deepseek.rs b/src/llm/deepseek.rs
index 6154cbd..067ea86 100644
--- a/src/llm/deepseek.rs
+++ b/src/llm/deepseek.rs
@@ -82,25 +82,53 @@ impl DeepSeekClient {
         Ok(self)
     }
 
-    /// Validate API key
-    pub async fn validate_key(&self) -> Result<bool> {
+    /// List available models
+    pub async fn list_models(&self) -> Result<Vec<String>> {
         let url = format!("{}/models", self.base_url);
-        
+
         let response = self.client
             .get(&url)
             .header("Authorization", format!("Bearer {}", self.api_key))
             .send()
             .await
-            .context("Failed to validate DeepSeek API key")?;
-        
-        if response.status().is_success() {
-            Ok(true)
-        } else if response.status().as_u16() == 401 {
-            Ok(false)
-        } else {
+            .context("Failed to list DeepSeek models")?;
+
+        if !response.status().is_success() {
             let status = response.status();
             let text = response.text().await.unwrap_or_default();
-            bail!("DeepSeek API error: {} - {}", status, text)
+            bail!("DeepSeek API error: {} - {}", status, text);
+        }
+
+        #[derive(Deserialize)]
+        struct ModelsResponse {
+            data: Vec<Model>,
+        }
+
+        #[derive(Deserialize)]
+        struct Model {
+            id: String,
+        }
+
+        let result: ModelsResponse = response
+            .json()
+            .await
+            .context("Failed to parse DeepSeek response")?;
+
+        Ok(result.data.into_iter().map(|m| m.id).collect())
+    }
+
+    /// Validate API key
+    pub async fn validate_key(&self) -> Result<bool> {
+        match self.list_models().await {
+            Ok(_) => Ok(true),
+            Err(e) => {
+                let err_str = e.to_string();
+                if err_str.contains("401") || err_str.contains("Unauthorized") {
+                    Ok(false)
+                } else {
+                    Err(e)
+                }
+            }
         }
     }
 }
diff --git a/src/llm/kimi.rs b/src/llm/kimi.rs
index 1fd88aa..3681dec 100644
--- a/src/llm/kimi.rs
+++ b/src/llm/kimi.rs
@@ -82,25 +82,53 @@ impl KimiClient {
         Ok(self)
     }
 
-    /// Validate API key
-    pub async fn validate_key(&self) -> Result<bool> {
+    /// List available models
+    pub async fn list_models(&self) -> Result<Vec<String>> {
         let url = format!("{}/models", self.base_url);
-        
+
         let response = self.client
             .get(&url)
             .header("Authorization", format!("Bearer {}", self.api_key))
             .send()
             .await
-            .context("Failed to validate Kimi API key")?;
-        
-        if response.status().is_success() {
-            Ok(true)
-        } else if response.status().as_u16() == 401 {
-            Ok(false)
-        } else {
+            .context("Failed to list Kimi models")?;
+
+        if !response.status().is_success() {
             let status = response.status();
             let text = response.text().await.unwrap_or_default();
-            bail!("Kimi API error: {} - {}", status, text)
+            bail!("Kimi API error: {} - {}", status, text);
+        }
+
+        #[derive(Deserialize)]
+        struct ModelsResponse {
+            data: Vec<Model>,
+        }
+
+        #[derive(Deserialize)]
+        struct Model {
+            id: String,
+        }
+
+        let result: ModelsResponse = response
+            .json()
+            .await
+            .context("Failed to parse Kimi response")?;
+
+        Ok(result.data.into_iter().map(|m| m.id).collect())
+    }
+
+    /// Validate API key
+    pub async fn validate_key(&self) -> Result<bool> {
+        match self.list_models().await {
+            Ok(_) => Ok(true),
+            Err(e) => {
+                let err_str = e.to_string();
+                if err_str.contains("401") || err_str.contains("Unauthorized") {
+                    Ok(false)
+                } else {
+                    Err(e)
+                }
+            }
         }
     }
 }
diff --git a/src/llm/openrouter.rs b/src/llm/openrouter.rs
index cf645a3..0f4c7ed 100644
--- a/src/llm/openrouter.rs
+++ b/src/llm/openrouter.rs
@@ -82,10 +82,10 @@ impl OpenRouterClient {
         Ok(self)
     }
 
-    /// Validate API key
-    pub async fn validate_key(&self) -> Result<bool> {
+    /// List available models
+    pub async fn list_models(&self) -> Result<Vec<String>> {
         let url = format!("{}/models", self.base_url);
-        
+
         let response = self.client
             .get(&url)
             .header("Authorization", format!("Bearer {}", self.api_key))
@@ -93,16 +93,44 @@
             .header("X-Title", "QuiCommit")
             .send()
             .await
-            .context("Failed to validate OpenRouter API key")?;
-        
-        if response.status().is_success() {
-            Ok(true)
-        } else if response.status().as_u16() == 401 {
-            Ok(false)
-        } else {
+            .context("Failed to list OpenRouter models")?;
+
+        if !response.status().is_success() {
             let status = response.status();
             let text = response.text().await.unwrap_or_default();
-            bail!("OpenRouter API error: {} - {}", status, text)
+            bail!("OpenRouter API error: {} - {}", status, text);
+        }
+
+        #[derive(Deserialize)]
+        struct ModelsResponse {
+            data: Vec<Model>,
+        }
+
+        #[derive(Deserialize)]
+        struct Model {
+            id: String,
+        }
+
+        let result: ModelsResponse = response
+            .json()
+            .await
+            .context("Failed to parse OpenRouter response")?;
+
+        Ok(result.data.into_iter().map(|m| m.id).collect())
+    }
+
+    /// Validate API key
+    pub async fn validate_key(&self) -> Result<bool> {
+        match self.list_models().await {
+            Ok(_) => Ok(true),
+            Err(e) => {
+                let err_str = e.to_string();
+                if err_str.contains("401") || err_str.contains("Unauthorized") {
+                    Ok(false)
+                } else {
+                    Err(e)
+                }
+            }
         }
     }
 }
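
Reviewer note: below is a minimal usage sketch of the new list_models API, mirroring the "deepseek" match arm added in src/commands/config.rs above. It is not part of the patch; the model name and base URL literals are illustrative placeholders, and an async (tokio) runtime plus the crate's anyhow error type are assumed.

    // Inside the crate, given an API key loaded from config or the environment:
    async fn print_deepseek_models(api_key: &str) -> anyhow::Result<()> {
        // Constructor argument order follows the patch: key, model, base_url.
        let client = crate::llm::DeepSeekClient::with_base_url(
            api_key,
            "deepseek-chat",               // placeholder model name
            "https://api.deepseek.com/v1", // placeholder base_url
        )?;

        // list_models() issues GET {base_url}/models and returns the model ids.
        for model in client.list_models().await? {
            println!("{}", model);
        }
        Ok(())
    }

As the patch notes, Anthropic exposes no models endpoint here, so AnthropicClient::list_models returns the predefined ANTHROPIC_MODELS list and the same command path works without a network call; validate_key for the OpenAI-compatible providers now simply reuses list_models and treats a 401/Unauthorized error as an invalid key.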