diff --git a/src/bin/hook.rs b/src/bin/hook.rs
index d9ebd913..b56f300c 100644
--- a/src/bin/hook.rs
+++ b/src/bin/hook.rs
@@ -94,13 +94,15 @@ impl Args {
       Some(Message | Template | Merge | Squash) => Ok(()),
       Some(Commit) | None => {
         let repo = Repository::open_from_env().context("Failed to open repository")?;
-        let model = config::APP
+        let model = config::APP_CONFIG
           .model
           .clone()
           .unwrap_or("gpt-4o-mini".to_string())
           .into();
         let used_tokens = commit::token_used(&model)?;
-        let max_tokens = config::APP.max_tokens.unwrap_or(model.context_size());
+        let max_tokens = config::APP_CONFIG
+          .max_tokens
+          .unwrap_or(model.context_size());
         let remaining_tokens = max_tokens.saturating_sub(used_tokens).max(512); // Ensure minimum 512 tokens
 
         let tree = match self.sha1.as_deref() {
diff --git a/src/commit.rs b/src/commit.rs
index 908d37ef..e3246629 100644
--- a/src/commit.rs
+++ b/src/commit.rs
@@ -5,7 +5,7 @@ use async_openai::Client;
 
 use crate::{config, debug_output, openai, profile};
 use crate::model::Model;
-use crate::config::App as Settings;
+use crate::config::AppConfig;
 use crate::multi_step_integration::{generate_commit_message_local, generate_commit_message_multi_step};
 
 /// The instruction template included at compile time
@@ -21,7 +21,10 @@ const INSTRUCTION_TEMPLATE: &str = include_str!("../resources/prompt.md");
 #[doc(hidden)]
 pub fn get_instruction_template() -> Result<String> {
   profile!("Generate instruction template");
-  let max_length = config::APP.max_commit_length.unwrap_or(72).to_string();
+  let max_length = config::APP_CONFIG
+    .max_commit_length
+    .unwrap_or(72)
+    .to_string();
   let template = mustache::compile_str(INSTRUCTION_TEMPLATE)
     .map_err(|e| anyhow!("Template compilation error: {}", e))?
     .render_to_string(&hashmap! {
@@ -70,7 +73,7 @@ pub fn create_commit_request(diff: String, max_tokens: usize, model: Model) -> R
 /// Returns an error if:
 /// - max_tokens is 0
 /// - OpenAI API call fails
-pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&Settings>) -> Result<openai::Response> {
+pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&AppConfig>) -> Result<openai::Response> {
   profile!("Generate commit message");
 
   if remaining_tokens == 0 {
@@ -80,7 +83,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
   // Try multi-step approach first
   let max_length = settings
     .and_then(|s| s.max_commit_length)
-    .or(config::APP.max_commit_length);
+    .or(config::APP_CONFIG.max_commit_length);
 
   // Check if we have a valid API key configuration
   let has_valid_api_key = if let Some(custom_settings) = settings {
@@ -91,7 +94,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
       .unwrap_or(false)
   } else {
     // Check environment variable or config
-    config::APP
+    config::APP_CONFIG
       .openai_api_key
       .as_ref()
       .map(|key| !key.is_empty() && key != "")
@@ -215,7 +218,7 @@ mod tests {
   #[tokio::test]
   async fn test_missing_api_key_error() {
     // Create settings with no API key
-    let settings = Settings {
+    let settings = AppConfig {
       openai_api_key: None,
       model: Some("gpt-4o-mini".to_string()),
       max_tokens: Some(1024),
@@ -253,7 +256,7 @@ mod tests {
   #[tokio::test]
   async fn test_invalid_api_key_error() {
     // Create settings with invalid API key
-    let settings = Settings {
+    let settings = AppConfig {
       openai_api_key: Some("".to_string()),
       model: Some("gpt-4o-mini".to_string()),
       max_tokens: Some(1024),
diff --git a/src/config.rs b/src/config.rs
index bb5fd09e..38c99ac1 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -16,7 +16,7 @@ const DEFAULT_MODEL: &str = "gpt-4o-mini";
 const DEFAULT_API_KEY: &str = "";
 
 #[derive(Debug, Default, Deserialize, PartialEq, Eq, Serialize)]
-pub struct App {
+pub struct AppConfig {
   pub openai_api_key: Option<String>,
   pub model: Option<String>,
   pub max_tokens: Option<usize>,
@@ -32,7 +32,7 @@ pub struct ConfigPaths {
 
 lazy_static! {
   static ref PATHS: ConfigPaths = ConfigPaths::new();
-  pub static ref APP: App = App::new().expect("Failed to load config");
+  pub static ref APP_CONFIG: AppConfig = AppConfig::new().expect("Failed to load config");
 }
 
 impl ConfigPaths {
@@ -55,7 +55,7 @@ impl ConfigPaths {
   }
 }
 
-impl App {
+impl AppConfig {
   pub fn new() -> Result<Self> {
     dotenv::dotenv().ok();
     PATHS.ensure_exists()?;
diff --git a/src/generation/mod.rs b/src/generation/mod.rs
new file mode 100644
index 00000000..4bfdcee4
--- /dev/null
+++ b/src/generation/mod.rs
@@ -0,0 +1,3 @@
+pub mod types;
+
+pub use types::{CommitResponse, FileCategory, FileChange, OperationType};
diff --git a/src/generation/types.rs b/src/generation/types.rs
new file mode 100644
index 00000000..06209dc0
--- /dev/null
+++ b/src/generation/types.rs
@@ -0,0 +1,94 @@
+use std::collections::HashMap;
+
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FileChange {
+  pub file_path: String,
+  pub operation_type: OperationType,
+  pub diff_content: Option<String>,
+  pub lines_added: u32,
+  pub lines_removed: u32,
+  pub file_category: FileCategory,
+  pub summary: String,
+  pub impact_score: f32
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
+pub enum OperationType {
+  Added,
+  Modified,
+  Deleted,
+  Renamed,
+  Binary
+}
+
+impl OperationType {
+  pub fn as_str(&self) -> &'static str {
+    match self {
+      OperationType::Added => "added",
+      OperationType::Modified => "modified",
+      OperationType::Deleted => "deleted",
+      OperationType::Renamed => "renamed",
+      OperationType::Binary => "binary"
+    }
+  }
+}
+
+impl From<&str> for OperationType {
+  fn from(s: &str) -> Self {
+    match s {
+      "added" => OperationType::Added,
+      "modified" => OperationType::Modified,
+      "deleted" => OperationType::Deleted,
+      "renamed" => OperationType::Renamed,
+      "binary" => OperationType::Binary,
+      _ => OperationType::Modified // default fallback
+    }
+  }
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
+pub enum FileCategory {
+  Source,
+  Test,
+  Config,
+  Docs,
+  Binary,
+  Build
+}
+
+impl FileCategory {
+  pub fn as_str(&self) -> &'static str {
+    match self {
+      FileCategory::Source => "source",
+      FileCategory::Test => "test",
+      FileCategory::Config => "config",
+      FileCategory::Docs => "docs",
+      FileCategory::Binary => "binary",
+      FileCategory::Build => "build"
+    }
+  }
+}
+
+impl From<&str> for FileCategory {
+  fn from(s: &str) -> Self {
+    match s {
+      "source" => FileCategory::Source,
+      "test" => FileCategory::Test,
+      "config" => FileCategory::Config,
+      "docs" => FileCategory::Docs,
+      "binary" => FileCategory::Binary,
+      "build" => FileCategory::Build,
+      _ => FileCategory::Source // default fallback
+    }
+  }
+}
+
+/// Unified response type for commit message generation
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CommitResponse {
+  pub message: String,
+  pub reasoning: String,
+  pub files: HashMap<String, FileChange>
+}
diff --git a/src/lib.rs b/src/lib.rs
index 13e13b04..7081bf59 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,6 +11,7 @@ pub mod multi_step_analysis;
 pub mod multi_step_integration;
 pub mod simple_multi_step;
 pub mod debug_output;
+pub mod generation;
 
 // Re-exports
 pub use profiling::Profile;
diff --git a/src/main.rs b/src/main.rs
index 93bcf916..6b250122 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -5,7 +5,7 @@ use structopt::StructOpt;
 use anyhow::Result;
 use dotenv::dotenv;
 
-use crate::config::App;
+use crate::config::AppConfig;
 use crate::filesystem::Filesystem;
 
 #[derive(StructOpt)]
@@ -119,28 +119,28 @@ fn run_config_reset() -> Result<()> {
 }
 
 fn run_config_model(value: String) -> Result<()> {
-  let mut app = App::new()?;
+  let mut app = AppConfig::new()?;
   app.update_model(value.clone())?;
   println!("✅ Model set to: {value}");
   Ok(())
 }
 
 fn run_config_max_tokens(max_tokens: usize) -> Result<()> {
-  let mut app = App::new()?;
+  let mut app = AppConfig::new()?;
   app.update_max_tokens(max_tokens)?;
   println!("✅ Max tokens set to: {max_tokens}");
   Ok(())
 }
 
 fn run_config_max_commit_length(max_commit_length: usize) -> Result<()> {
-  let mut app = App::new()?;
+  let mut app = AppConfig::new()?;
   app.update_max_commit_length(max_commit_length)?;
   println!("✅ Max commit length set to: {max_commit_length}");
   Ok(())
 }
 
 fn run_config_openai_api_key(value: String) -> Result<()> {
-  let mut app = App::new()?;
+  let mut app = AppConfig::new()?;
   app.update_openai_api_key(value)?;
   println!("✅ OpenAI API key updated");
   Ok(())
diff --git a/src/model.rs b/src/model.rs
index 344d4d21..5bbd1f22 100644
--- a/src/model.rs
+++ b/src/model.rs
@@ -12,7 +12,7 @@ use colored::Colorize;
 
 use crate::profile;
 // use crate::config::format_prompt; // Temporarily comment out
-use crate::config::App as Settings; // Use App as Settings
+use crate::config::AppConfig;
 
 // Cached tokenizer for performance
 static TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
@@ -217,7 +217,7 @@ fn get_tokenizer(_model_str: &str) -> CoreBPE {
   tiktoken_rs::cl100k_base().expect("Failed to create tokenizer")
 }
 
-pub async fn run(settings: Settings, content: String) -> Result<String> {
+pub async fn run(settings: AppConfig, content: String) -> Result<String> {
   let model_str = settings.model.as_deref().unwrap_or(DEFAULT_MODEL_NAME);
 
   let client = async_openai::Client::new();
diff --git a/src/multi_step_analysis.rs b/src/multi_step_analysis.rs
index 9cfd6745..c0bacc3c 100644
--- a/src/multi_step_analysis.rs
+++ b/src/multi_step_analysis.rs
@@ -2,6 +2,7 @@ use serde::{Deserialize, Serialize};
 use serde_json::json;
 use async_openai::types::{ChatCompletionTool, ChatCompletionToolType, FunctionObjectArgs};
 use anyhow::Result;
+// TODO: Migrate to unified types from generation module
 
 /// File analysis result from the analyze function
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -23,7 +24,7 @@ pub struct FileDataForScoring {
   pub summary: String
 }
 
-/// File data with calculated impact score 
+/// File data with calculated impact score
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct FileWithScore {
   pub file_path: String,
diff --git a/src/openai.rs b/src/openai.rs
index 53cfac13..a25b5532 100644
--- a/src/openai.rs
+++ b/src/openai.rs
@@ -10,7 +10,7 @@ use futures::future::join_all;
 
 use crate::{commit, config, debug_output, function_calling, profile};
 use crate::model::Model;
-use crate::config::App as Settings;
+use crate::config::AppConfig;
 use crate::multi_step_integration::generate_commit_message_multi_step;
 
 const MAX_ATTEMPTS: usize = 3;
@@ -110,7 +110,7 @@ pub async fn generate_commit_message(diff: &str) -> Result<String> {
 }
 
 /// Creates an OpenAI configuration from application settings
-pub fn create_openai_config(settings: &Settings) -> Result<OpenAIConfig> {
+pub fn create_openai_config(settings: &AppConfig) -> Result<OpenAIConfig> {
   let api_key = settings
     .openai_api_key
     .as_ref()
@@ -205,7 +205,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
   let client = Client::with_config(config.clone());
   let model = request.model.to_string();
 
-  match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP.max_commit_length).await {
+  match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP_CONFIG.max_commit_length).await {
     Ok(message) => return Ok(Response { response: message }),
     Err(e) => {
       // Check if it's an API key error and propagate it
@@ -218,7 +218,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
 
   // Original single-step implementation as fallback
   // Create client with timeout if specified
-  let client = if let Some(timeout) = config::APP.timeout {
+  let client = if let Some(timeout) = config::APP_CONFIG.timeout {
     let http_client = reqwest::ClientBuilder::new()
       .timeout(Duration::from_secs(timeout as u64))
       .build()?;
@@ -236,7 +236,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
   let truncated_prompt = truncate_to_fit(&request.prompt, available_tokens, &request.model)?;
 
   // Create the commit function tool
-  let commit_tool = function_calling::create_commit_function_tool(config::APP.max_commit_length)?;
+  let commit_tool = function_calling::create_commit_function_tool(config::APP_CONFIG.max_commit_length)?;
 
   let chat_request = CreateChatCompletionRequestArgs::default()
     .max_tokens(request.max_tokens)
@@ -382,7 +382,7 @@ pub async fn call(request: Request) -> Result<Response> {
   profile!("OpenAI API call");
 
   // Create OpenAI configuration using our settings
-  let config = create_openai_config(&config::APP)?;
+  let config = create_openai_config(&config::APP_CONFIG)?;
 
   // Use the call_with_config function with the default config
   call_with_config(request, config).await
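
Reviewer note: the test-style sketch below is illustrative only and not part of the patch. It shows how the new `generation::types` API introduced above might be exercised from inside the crate (so `crate::generation` resolves). The module and test names are hypothetical, and it assumes `serde_json` is available as a dependency, which it already is for `src/multi_step_analysis.rs`.

#[cfg(test)]
mod generation_types_sketch {
  use std::collections::HashMap;

  use crate::generation::{CommitResponse, FileCategory, FileChange, OperationType};

  #[test]
  fn round_trip_sketch() {
    // Unknown strings fall back to Modified / Source via the From<&str> impls,
    // so loosely-typed values (e.g. from an LLM function call) cannot panic here.
    assert_eq!(OperationType::from("renamed").as_str(), "renamed");
    assert_eq!(FileCategory::from("not-a-category").as_str(), "source");

    let change = FileChange {
      file_path: "src/config.rs".into(),
      operation_type: OperationType::Modified,
      diff_content: None,
      lines_added: 4,
      lines_removed: 4,
      file_category: FileCategory::Source,
      summary: "Rename App to AppConfig".into(),
      impact_score: 0.8
    };

    // CommitResponse keys its per-file details by path.
    let mut files = HashMap::new();
    files.insert(change.file_path.clone(), change);

    let response = CommitResponse {
      message: "refactor: rename App config struct to AppConfig".into(),
      reasoning: "Clearer name for the global config type".into(),
      files
    };

    // CommitResponse derives Serialize/Deserialize, so it round-trips as JSON.
    let json = serde_json::to_string(&response).unwrap();
    let back: CommitResponse = serde_json::from_str(&json).unwrap();
    assert_eq!(back.message, response.message);
    assert_eq!(back.files.len(), 1);
  }
}

The lenient `From<&str>` fallbacks mean parsing free-form model output degrades gracefully to `Modified`/`Source` instead of failing, which fits the TODO in `src/multi_step_analysis.rs` about migrating its stringly-typed structs onto these unified types.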