diff --git a/src/git.rs b/src/git.rs
index afb87fe..1dc276c 100644
--- a/src/git.rs
+++ b/src/git.rs
@@ -11,7 +11,16 @@ use crate::cmd;
 use anyhow::{bail, Result};
 
 pub(crate) fn get_diffs() -> Result<String> {
-    let output = cmd::run_command("git", &["diff", "--staged", "-w"])?;
+    let output = cmd::run_command(
+        "git",
+        &[
+            "diff",
+            "--staged",
+            "--ignore-all-space",
+            "--diff-algorithm=minimal",
+            "--function-context",
+        ],
+    )?;
 
     Ok(output)
 }
diff --git a/src/llms/openai.rs b/src/llms/openai.rs
index 9623cc9..cda7c21 100644
--- a/src/llms/openai.rs
+++ b/src/llms/openai.rs
@@ -3,6 +3,7 @@ use std::time::Duration;
 
 use anyhow::{anyhow, bail, Result};
 use async_trait::async_trait;
+use colored::Colorize;
 use reqwest::{Client, ClientBuilder};
 use serde_json::{json, Value};
 use tiktoken_rs::tiktoken::{p50k_base, CoreBPE};
@@ -40,13 +41,14 @@ impl OpenAIClient {
 
     pub(crate) fn get_prompt_token_limit_for_model(&self) -> usize {
         match self.model.as_str() {
-            "text-davinci-003" => 4097,
+            "text-davinci-003" => 4000,
+            "text-davinci-002" => 4000,
             "text-curie-001" => 2048,
             "text-babbage-001" => 2048,
             "text-ada-001" => 2048,
-            "code-davinci-002" => 8000,
+            "code-davinci-002" => 4000,
             "code-cushman-001" => 2048,
-            _ => 4097,
+            _ => 4096,
         }
     }
 }
@@ -101,7 +103,24 @@ impl LlmClient for OpenAIClient {
         })?;
         Ok(json_response["choices"][0]["text"]
             .as_str()
-            .ok_or_else(|| anyhow!("Unexpected JSON response:\n{}", json_response))?
+            .ok_or_else(|| {
+                let error_message: &str = json_response
+                    .get("error")
+                    .and_then(|e| e.get("message"))
+                    .and_then(|m| m.as_str())
+                    .unwrap_or_default();
+                if !error_message.is_empty() {
+                    return anyhow!(
+                        "{}",
+                        format!("OpenAI error: {error_message}").bold().yellow()
+                    );
+                }
+
+                anyhow!(
+                    "Unexpected API response:\n{}",
+                    json_response.to_string().yellow()
+                )
+            })?
             .trim()
             .to_string())
     }