Skip to content
6 changes: 4 additions & 2 deletions src/bin/hook.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,13 +94,15 @@ impl Args {
Some(Message | Template | Merge | Squash) => Ok(()),
Some(Commit) | None => {
let repo = Repository::open_from_env().context("Failed to open repository")?;
let model = config::APP
let model = config::APP_CONFIG
.model
.clone()
.unwrap_or("gpt-4o-mini".to_string())
.into();
let used_tokens = commit::token_used(&model)?;
let max_tokens = config::APP.max_tokens.unwrap_or(model.context_size());
let max_tokens = config::APP_CONFIG
.max_tokens
.unwrap_or(model.context_size());
let remaining_tokens = max_tokens.saturating_sub(used_tokens).max(512); // Ensure minimum 512 tokens

let tree = match self.sha1.as_deref() {
Expand Down
17 changes: 10 additions & 7 deletions src/commit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use async_openai::Client;

use crate::{config, debug_output, openai, profile};
use crate::model::Model;
use crate::config::App as Settings;
use crate::config::AppConfig;
use crate::multi_step_integration::{generate_commit_message_local, generate_commit_message_multi_step};

/// The instruction template included at compile time
Expand All @@ -21,7 +21,10 @@ const INSTRUCTION_TEMPLATE: &str = include_str!("../resources/prompt.md");
#[doc(hidden)]
pub fn get_instruction_template() -> Result<String> {
profile!("Generate instruction template");
let max_length = config::APP.max_commit_length.unwrap_or(72).to_string();
let max_length = config::APP_CONFIG
.max_commit_length
.unwrap_or(72)
.to_string();
let template = mustache::compile_str(INSTRUCTION_TEMPLATE)
.map_err(|e| anyhow!("Template compilation error: {}", e))?
.render_to_string(&hashmap! {
Expand Down Expand Up @@ -70,7 +73,7 @@ pub fn create_commit_request(diff: String, max_tokens: usize, model: Model) -> R
/// Returns an error if:
/// - max_tokens is 0
/// - OpenAI API call fails
pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&Settings>) -> Result<openai::Response> {
pub async fn generate(patch: String, remaining_tokens: usize, model: Model, settings: Option<&AppConfig>) -> Result<openai::Response> {
profile!("Generate commit message");

if remaining_tokens == 0 {
Expand All @@ -80,7 +83,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
// Try multi-step approach first
let max_length = settings
.and_then(|s| s.max_commit_length)
.or(config::APP.max_commit_length);
.or(config::APP_CONFIG.max_commit_length);

// Check if we have a valid API key configuration
let has_valid_api_key = if let Some(custom_settings) = settings {
Expand All @@ -91,7 +94,7 @@ pub async fn generate(patch: String, remaining_tokens: usize, model: Model, sett
.unwrap_or(false)
} else {
// Check environment variable or config
config::APP
config::APP_CONFIG
.openai_api_key
.as_ref()
.map(|key| !key.is_empty() && key != "<PLACE HOLDER FOR YOUR API KEY>")
Expand Down Expand Up @@ -215,7 +218,7 @@ mod tests {
#[tokio::test]
async fn test_missing_api_key_error() {
// Create settings with no API key
let settings = Settings {
let settings = AppConfig {
openai_api_key: None,
model: Some("gpt-4o-mini".to_string()),
max_tokens: Some(1024),
Expand Down Expand Up @@ -253,7 +256,7 @@ mod tests {
#[tokio::test]
async fn test_invalid_api_key_error() {
// Create settings with invalid API key
let settings = Settings {
let settings = AppConfig {
openai_api_key: Some("<PLACE HOLDER FOR YOUR API KEY>".to_string()),
model: Some("gpt-4o-mini".to_string()),
max_tokens: Some(1024),
Expand Down
6 changes: 3 additions & 3 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ const DEFAULT_MODEL: &str = "gpt-4o-mini";
const DEFAULT_API_KEY: &str = "<PLACE HOLDER FOR YOUR API KEY>";

#[derive(Debug, Default, Deserialize, PartialEq, Eq, Serialize)]
pub struct App {
pub struct AppConfig {
pub openai_api_key: Option<String>,
pub model: Option<String>,
pub max_tokens: Option<usize>,
Expand All @@ -32,7 +32,7 @@ pub struct ConfigPaths {

lazy_static! {
static ref PATHS: ConfigPaths = ConfigPaths::new();
pub static ref APP: App = App::new().expect("Failed to load config");
pub static ref APP_CONFIG: AppConfig = AppConfig::new().expect("Failed to load config");
}

impl ConfigPaths {
Expand All @@ -55,7 +55,7 @@ impl ConfigPaths {
}
}

impl App {
impl AppConfig {
pub fn new() -> Result<Self> {
dotenv::dotenv().ok();
PATHS.ensure_exists()?;
Expand Down
3 changes: 3 additions & 0 deletions src/generation/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
pub mod types;

pub use types::{CommitResponse, FileCategory, FileChange, OperationType};
94 changes: 94 additions & 0 deletions src/generation/types.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
use std::collections::HashMap;

use serde::{Deserialize, Serialize};

/// A single file's change within a diff, as analyzed for commit-message generation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileChange {
/// Path of the changed file as it appears in the diff.
pub file_path: String,
/// What happened to the file (added/modified/deleted/renamed/binary).
pub operation_type: OperationType,
/// Raw diff text for this file, when available (e.g. absent for binary files — TODO confirm).
pub diff_content: Option<String>,
/// Number of lines added in this change.
pub lines_added: u32,
/// Number of lines removed in this change.
pub lines_removed: u32,
/// Broad classification of the file (source/test/config/docs/binary/build).
pub file_category: FileCategory,
/// Short human-readable description of the change.
pub summary: String,
/// Relative importance of this change; NOTE(review): scale and range are not
/// defined here — presumably higher means more significant. Verify against producers.
pub impact_score: f32
}

/// The kind of operation a diff performed on a file.
///
/// Converts to/from lowercase string labels via [`OperationType::as_str`]
/// and `From<&str>`.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum OperationType {
/// File was newly created.
Added,
/// Existing file's contents were changed.
Modified,
/// File was removed.
Deleted,
/// File was moved/renamed.
Renamed,
/// File is binary (no line-based diff).
Binary
}

impl OperationType {
pub fn as_str(&self) -> &'static str {
match self {
OperationType::Added => "added",
OperationType::Modified => "modified",
OperationType::Deleted => "deleted",
OperationType::Renamed => "renamed",
OperationType::Binary => "binary"
}
}
}

impl From<&str> for OperationType {
fn from(s: &str) -> Self {
match s {
"added" => OperationType::Added,
"modified" => OperationType::Modified,
"deleted" => OperationType::Deleted,
"renamed" => OperationType::Renamed,
"binary" => OperationType::Binary,
_ => OperationType::Modified // default fallback
}
}
}

/// Broad classification of a changed file, used to weight its relevance
/// when generating a commit message.
///
/// Converts to/from lowercase string labels via [`FileCategory::as_str`]
/// and `From<&str>`.
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum FileCategory {
/// Production source code.
Source,
/// Test code.
Test,
/// Configuration files.
Config,
/// Documentation.
Docs,
/// Binary assets.
Binary,
/// Build-system files.
Build
}

impl FileCategory {
pub fn as_str(&self) -> &'static str {
match self {
FileCategory::Source => "source",
FileCategory::Test => "test",
FileCategory::Config => "config",
FileCategory::Docs => "docs",
FileCategory::Binary => "binary",
FileCategory::Build => "build"
}
}
}

impl From<&str> for FileCategory {
fn from(s: &str) -> Self {
match s {
"source" => FileCategory::Source,
"test" => FileCategory::Test,
"config" => FileCategory::Config,
"docs" => FileCategory::Docs,
"binary" => FileCategory::Binary,
"build" => FileCategory::Build,
_ => FileCategory::Source // default fallback
}
}
}

/// Unified response type for commit message generation
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitResponse {
/// The generated commit message text.
pub message: String,
/// Explanation of why this message was chosen.
pub reasoning: String,
/// Per-file change details; presumably keyed by file path — verify against producers.
pub files: HashMap<String, FileChange>
}
1 change: 1 addition & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ pub mod multi_step_analysis;
pub mod multi_step_integration;
pub mod simple_multi_step;
pub mod debug_output;
pub mod generation;

// Re-exports
pub use profiling::Profile;
10 changes: 5 additions & 5 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use structopt::StructOpt;
use anyhow::Result;
use dotenv::dotenv;

use crate::config::App;
use crate::config::AppConfig;
use crate::filesystem::Filesystem;

#[derive(StructOpt)]
Expand Down Expand Up @@ -119,28 +119,28 @@ fn run_config_reset() -> Result<()> {
}

fn run_config_model(value: String) -> Result<()> {
let mut app = App::new()?;
let mut app = AppConfig::new()?;
app.update_model(value.clone())?;
println!("✅ Model set to: {value}");
Ok(())
}

fn run_config_max_tokens(max_tokens: usize) -> Result<()> {
let mut app = App::new()?;
let mut app = AppConfig::new()?;
app.update_max_tokens(max_tokens)?;
println!("✅ Max tokens set to: {max_tokens}");
Ok(())
}

fn run_config_max_commit_length(max_commit_length: usize) -> Result<()> {
let mut app = App::new()?;
let mut app = AppConfig::new()?;
app.update_max_commit_length(max_commit_length)?;
println!("✅ Max commit length set to: {max_commit_length}");
Ok(())
}

fn run_config_openai_api_key(value: String) -> Result<()> {
let mut app = App::new()?;
let mut app = AppConfig::new()?;
app.update_openai_api_key(value)?;
println!("✅ OpenAI API key updated");
Ok(())
Expand Down
4 changes: 2 additions & 2 deletions src/model.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ use colored::Colorize;

use crate::profile;
// use crate::config::format_prompt; // Temporarily comment out
use crate::config::App as Settings; // Use App as Settings
use crate::config::AppConfig;

// Cached tokenizer for performance
static TOKENIZER: OnceLock<CoreBPE> = OnceLock::new();
Expand Down Expand Up @@ -217,7 +217,7 @@ fn get_tokenizer(_model_str: &str) -> CoreBPE {
tiktoken_rs::cl100k_base().expect("Failed to create tokenizer")
}

pub async fn run(settings: Settings, content: String) -> Result<String> {
pub async fn run(settings: AppConfig, content: String) -> Result<String> {
let model_str = settings.model.as_deref().unwrap_or(DEFAULT_MODEL_NAME);

let client = async_openai::Client::new();
Expand Down
3 changes: 2 additions & 1 deletion src/multi_step_analysis.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use async_openai::types::{ChatCompletionTool, ChatCompletionToolType, FunctionObjectArgs};
use anyhow::Result;
// TODO: Migrate to unified types from generation module

/// File analysis result from the analyze function
#[derive(Debug, Clone, Serialize, Deserialize)]
Expand All @@ -23,7 +24,7 @@ pub struct FileDataForScoring {
pub summary: String
}

/// File data with calculated impact score
/// File data with calculated impact score
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FileWithScore {
pub file_path: String,
Expand Down
12 changes: 6 additions & 6 deletions src/openai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use futures::future::join_all;

use crate::{commit, config, debug_output, function_calling, profile};
use crate::model::Model;
use crate::config::App as Settings;
use crate::config::AppConfig;
use crate::multi_step_integration::generate_commit_message_multi_step;

const MAX_ATTEMPTS: usize = 3;
Expand Down Expand Up @@ -110,7 +110,7 @@ pub async fn generate_commit_message(diff: &str) -> Result<String> {
}

/// Creates an OpenAI configuration from application settings
pub fn create_openai_config(settings: &Settings) -> Result<OpenAIConfig> {
pub fn create_openai_config(settings: &AppConfig) -> Result<OpenAIConfig> {
let api_key = settings
.openai_api_key
.as_ref()
Expand Down Expand Up @@ -205,7 +205,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
let client = Client::with_config(config.clone());
let model = request.model.to_string();

match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP.max_commit_length).await {
match generate_commit_message_multi_step(&client, &model, &request.prompt, config::APP_CONFIG.max_commit_length).await {
Ok(message) => return Ok(Response { response: message }),
Err(e) => {
// Check if it's an API key error and propagate it
Expand All @@ -218,7 +218,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<

// Original single-step implementation as fallback
// Create client with timeout if specified
let client = if let Some(timeout) = config::APP.timeout {
let client = if let Some(timeout) = config::APP_CONFIG.timeout {
let http_client = reqwest::ClientBuilder::new()
.timeout(Duration::from_secs(timeout as u64))
.build()?;
Expand All @@ -236,7 +236,7 @@ pub async fn call_with_config(request: Request, config: OpenAIConfig) -> Result<
let truncated_prompt = truncate_to_fit(&request.prompt, available_tokens, &request.model)?;

// Create the commit function tool
let commit_tool = function_calling::create_commit_function_tool(config::APP.max_commit_length)?;
let commit_tool = function_calling::create_commit_function_tool(config::APP_CONFIG.max_commit_length)?;

let chat_request = CreateChatCompletionRequestArgs::default()
.max_tokens(request.max_tokens)
Expand Down Expand Up @@ -382,7 +382,7 @@ pub async fn call(request: Request) -> Result<Response> {
profile!("OpenAI API call");

// Create OpenAI configuration using our settings
let config = create_openai_config(&config::APP)?;
let config = create_openai_config(&config::APP_CONFIG)?;

// Use the call_with_config function with the default config
call_with_config(request, config).await
Expand Down
Loading