From 7d011002b6dd1575006d20e2ba13781f76e92bdf Mon Sep 17 00:00:00 2001
From: adeeshperera
Date: Wed, 22 Oct 2025 12:22:57 +0530
Subject: [PATCH 1/3] fix #121: add systematic logging for store operations

- Add informational logging for successful Save, ChangeDefault,
  DeleteModel, and UpdateAPIKey operations
- Provide user feedback on state changes with fmt.Printf(), following
  existing patterns
- Improve visibility into store operation completion for a better
  debugging experience
---
 cmd/cli/store/store.go | 38 +++++++++++++++++++++++++++++++++-----
 1 file changed, 33 insertions(+), 5 deletions(-)

diff --git a/cmd/cli/store/store.go b/cmd/cli/store/store.go
index 143d3bf..1468140 100644
--- a/cmd/cli/store/store.go
+++ b/cmd/cli/store/store.go
@@ -110,7 +110,13 @@ func (s *StoreMethods) Save(LLMConfig LLMProvider) error {
 		return err
 	}
 
-	return os.WriteFile(configPath, data, 0600)
+	err = os.WriteFile(configPath, data, 0600)
+	if err != nil {
+		return err
+	}
+
+	fmt.Printf("LLM provider %s saved successfully\n", LLMConfig.LLM.String())
+	return nil
 }
 
 // DefaultLLMKey returns the currently selected default LLM provider, if any.
@@ -240,7 +246,13 @@ func ChangeDefault(Model types.LLMProvider) error {
 		return err
 	}
 
-	return os.WriteFile(configPath, data, 0600)
+	err = os.WriteFile(configPath, data, 0600)
+	if err != nil {
+		return err
+	}
+
+	fmt.Printf("%s set as default\n", Model.String())
+	return nil
 }
 
 // DeleteModel removes the specified provider from the saved configuration.
@@ -280,7 +292,12 @@ func (s *StoreMethods) DeleteModel(Model types.LLMProvider) error {
 		if err != nil {
 			return err
 		}
-		return os.WriteFile(configPath, []byte("{}"), 0600)
+		err = os.WriteFile(configPath, []byte("{}"), 0600)
+		if err != nil {
+			return err
+		}
+		fmt.Printf("%s model deleted\n", Model.String())
+		return nil
 	}
 
 } else {
@@ -301,7 +318,12 @@ func (s *StoreMethods) DeleteModel(Model types.LLMProvider) error {
 		if err != nil {
 			return err
 		}
-		return os.WriteFile(configPath, data, 0600)
+		err = os.WriteFile(configPath, data, 0600)
+		if err != nil {
+			return err
+		}
+		fmt.Printf("%s model deleted\n", Model.String())
+		return nil
 	}
 }
 
@@ -357,6 +379,12 @@ func (s *StoreMethods) UpdateAPIKey(Model types.LLMProvider, APIKey string) erro
 		return err
 	}
 
-	return os.WriteFile(configPath, data, 0600)
+	err = os.WriteFile(configPath, data, 0600)
+	if err != nil {
+		return err
+	}
+
+	fmt.Printf("API key for %s updated successfully\n", Model.String())
+	return nil
 }

From c02213ccd6e20fb2de908f5d330df33eb6f1182c Mon Sep 17 00:00:00 2001
From: adeeshperera
Date: Wed, 22 Oct 2025 12:41:19 +0530
Subject: [PATCH 2/3] fix #120: Replace magic numbers with constants in LLM
 providers

- Defined constants for model names, token limits, temperature settings,
  API endpoints, and headers in the following modules:
  - Claude
  - Groq
  - Gemini
  - ChatGPT
  - Grok
  - Ollama
- Improved code maintainability and consistency by eliminating hardcoded
  values.
---
 internal/chatgpt/chatgpt.go |  6 +++++-
 internal/claude/claude.go   | 22 ++++++++++++++++------
 internal/gemini/gemini.go   |  9 +++++++--
 internal/grok/grok.go       | 18 +++++++++++++-----
 internal/groq/groq.go       | 18 +++++++++++++-----
 internal/ollama/ollama.go   | 12 +++++++++---
 6 files changed, 63 insertions(+), 22 deletions(-)

diff --git a/internal/chatgpt/chatgpt.go b/internal/chatgpt/chatgpt.go
index 714c046..445400f 100644
--- a/internal/chatgpt/chatgpt.go
+++ b/internal/chatgpt/chatgpt.go
@@ -10,6 +10,10 @@ import (
 	"github.com/dfanso/commit-msg/pkg/types"
 )
 
+const (
+	chatgptModel = openai.ChatModelGPT4o
+)
+
 // GenerateCommitMessage calls OpenAI's chat completions API to turn the provided
 // repository changes into a polished git commit message.
 func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
@@ -22,7 +26,7 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 		Messages: []openai.ChatCompletionMessageParamUnion{
 			openai.UserMessage(prompt),
 		},
-		Model: openai.ChatModelGPT4o,
+		Model: chatgptModel,
 	})
 	if err != nil {
 		return "", fmt.Errorf("OpenAI error: %w", err)
diff --git a/internal/claude/claude.go b/internal/claude/claude.go
index c4aefbc..c93a00a 100644
--- a/internal/claude/claude.go
+++ b/internal/claude/claude.go
@@ -11,6 +11,16 @@ import (
 	"github.com/dfanso/commit-msg/pkg/types"
 )
 
+const (
+	claudeModel            = "claude-3-5-sonnet-20241022"
+	claudeMaxTokens        = 200
+	claudeAPIEndpoint      = "https://api.anthropic.com/v1/messages"
+	claudeAPIVersion       = "2023-06-01"
+	contentTypeJSON        = "application/json"
+	anthropicVersionHeader = "anthropic-version"
+	xAPIKeyHeader          = "x-api-key"
+)
+
 // ClaudeRequest describes the payload sent to Anthropic's Claude messages API.
 type ClaudeRequest struct {
 	Model     string          `json:"model"`
@@ -34,8 +44,8 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 	prompt := types.BuildCommitPrompt(changes, opts)
 
 	reqBody := ClaudeRequest{
-		Model:     "claude-3-5-sonnet-20241022",
-		MaxTokens: 200,
+		Model:     claudeModel,
+		MaxTokens: claudeMaxTokens,
 		Messages: []types.Message{
 			{
 				Role:    "user",
@@ -50,14 +60,14 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 	}
 
 	ctx := context.Background()
-	req, err := http.NewRequestWithContext(ctx, "POST", "https://api.anthropic.com/v1/messages", bytes.NewBuffer(jsonData))
+	req, err := http.NewRequestWithContext(ctx, "POST", claudeAPIEndpoint, bytes.NewBuffer(jsonData))
 	if err != nil {
 		return "", err
 	}
 
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("x-api-key", apiKey)
-	req.Header.Set("anthropic-version", "2023-06-01")
+	req.Header.Set("Content-Type", contentTypeJSON)
+	req.Header.Set(xAPIKeyHeader, apiKey)
+	req.Header.Set(anthropicVersionHeader, claudeAPIVersion)
 
 	client := httpClient.GetClient()
 	resp, err := client.Do(req)
diff --git a/internal/gemini/gemini.go b/internal/gemini/gemini.go
index 0606d49..3d8146d 100644
--- a/internal/gemini/gemini.go
+++ b/internal/gemini/gemini.go
@@ -10,6 +10,11 @@ import (
 	"github.com/dfanso/commit-msg/pkg/types"
 )
 
+const (
+	geminiModel       = "gemini-2.0-flash"
+	geminiTemperature = 0.2
+)
+
 // GenerateCommitMessage asks Google Gemini to author a commit message for the
 // supplied repository changes and optional style instructions.
 func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
@@ -25,8 +30,8 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 	defer client.Close()
 
 	// Create a GenerativeModel with appropriate settings
-	model := client.GenerativeModel("gemini-2.0-flash")
-	model.SetTemperature(0.2) // Lower temperature for more focused responses
+	model := client.GenerativeModel(geminiModel)
+	model.SetTemperature(geminiTemperature) // Lower temperature for more focused responses
 
 	// Generate content using the prompt
 	resp, err := model.GenerateContent(ctx, genai.Text(prompt))
diff --git a/internal/grok/grok.go b/internal/grok/grok.go
index 7a42960..d86e784 100644
--- a/internal/grok/grok.go
+++ b/internal/grok/grok.go
@@ -11,6 +11,14 @@ import (
 	"github.com/dfanso/commit-msg/pkg/types"
 )
 
+const (
+	grokModel           = "grok-3-mini-fast-beta"
+	grokTemperature     = 0
+	grokAPIEndpoint     = "https://api.x.ai/v1/chat/completions"
+	grokContentType     = "application/json"
+	authorizationPrefix = "Bearer "
+)
+
 // GenerateCommitMessage calls X.AI's Grok API to create a commit message from
 // the provided Git diff and generation options.
 func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
@@ -25,9 +33,9 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 				Content: prompt,
 			},
 		},
-		Model:       "grok-3-mini-fast-beta",
+		Model:       grokModel,
 		Stream:      false,
-		Temperature: 0,
+		Temperature: grokTemperature,
 	}
 
 	requestBody, err := json.Marshal(request)
@@ -36,14 +44,14 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
 	}
 
 	// Create HTTP request
-	req, err := http.NewRequest("POST", "https://api.x.ai/v1/chat/completions", bytes.NewBuffer(requestBody))
+	req, err := http.NewRequest("POST", grokAPIEndpoint, bytes.NewBuffer(requestBody))
 	if err != nil {
 		return "", err
 	}
 
 	// Set headers
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey))
+	req.Header.Set("Content-Type", grokContentType)
+	req.Header.Set("Authorization", fmt.Sprintf("%s%s", authorizationPrefix, apiKey))
 
 	client := httpClient.GetClient()
 	resp, err := client.Do(req)
diff --git a/internal/groq/groq.go b/internal/groq/groq.go
index 013dd0e..4630fb8 100644
--- a/internal/groq/groq.go
+++ b/internal/groq/groq.go
@@ -36,6 +36,14 @@ type chatResponse struct {
 // If Groq updates their defaults again, override via GROQ_MODEL.
 const defaultModel = "llama-3.3-70b-versatile"
 
+const (
+	groqTemperature         = 0.2
+	groqMaxTokens           = 200
+	groqSystemMessage       = "You are an assistant that writes clear, concise git commit messages."
+	groqContentType         = "application/json"
+	groqAuthorizationPrefix = "Bearer "
+)
+
 var (
 	// allow overrides in tests
 	baseURL = "https://api.groq.com/openai/v1/chat/completions"
@@ -62,10 +70,10 @@ func GenerateCommitMessage(_ *types.Config, changes string, apiKey string, opts
 
 	payload := chatRequest{
 		Model:       model,
-		Temperature: 0.2,
-		MaxTokens:   200,
+		Temperature: groqTemperature,
+		MaxTokens:   groqMaxTokens,
 		Messages: []chatMessage{
-			{Role: "system", Content: "You are an assistant that writes clear, concise git commit messages."},
+			{Role: "system", Content: groqSystemMessage},
 			{Role: "user", Content: prompt},
 		},
 	}
@@ -85,8 +93,8 @@ func GenerateCommitMessage(_ *types.Config, changes string, apiKey string, opts
 		return "", fmt.Errorf("failed to create Groq request: %w", err)
 	}
 
-	req.Header.Set("Content-Type", "application/json")
-	req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey))
+	req.Header.Set("Content-Type", groqContentType)
+	req.Header.Set("Authorization", fmt.Sprintf("%s%s", groqAuthorizationPrefix, apiKey))
 
 	resp, err := httpClient.Do(req)
 	if err != nil {
diff --git a/internal/ollama/ollama.go b/internal/ollama/ollama.go
index 98f354b..0db08c9 100644
--- a/internal/ollama/ollama.go
+++ b/internal/ollama/ollama.go
@@ -11,6 +11,12 @@ import (
 	"github.com/dfanso/commit-msg/pkg/types"
 )
 
+const (
+	ollamaDefaultModel = "llama3:latest"
+	ollamaStream       = false
+	ollamaContentType  = "application/json"
+)
+
 // OllamaRequest captures the prompt payload sent to an Ollama HTTP endpoint.
 type OllamaRequest struct {
 	Model  string `json:"model"`
@@ -28,7 +34,7 @@ type OllamaResponse struct {
 func GenerateCommitMessage(_ *types.Config, changes string, url string, model string, opts *types.GenerationOptions) (string, error) {
 	// Use llama3:latest as the default model
 	if model == "" {
-		model = "llama3:latest"
+		model = ollamaDefaultModel
 	}
 
 	// Preparing the prompt
@@ -38,7 +44,7 @@ func GenerateCommitMessage(_ *types.Config, changes string, url string, model st
 	reqBody := map[string]interface{}{
 		"model":  model,
 		"prompt": prompt,
-		"stream": false,
+		"stream": ollamaStream,
 	}
 
 	// Generating the body
@@ -51,7 +57,7 @@ func GenerateCommitMessage(_ *types.Config, changes string, url string, model st
 	if err != nil {
 		return "", fmt.Errorf("failed to create request: %v", err)
 	}
-	req.Header.Set("Content-Type", "application/json")
+	req.Header.Set("Content-Type", ollamaContentType)
 
 	resp, err := httpClient.GetOllamaClient().Do(req)
 	if err != nil {

From 0bf7df813fe2a35683015fbbd288fd97b6e248df Mon Sep 17 00:00:00 2001
From: adeeshperera
Date: Wed, 22 Oct 2025 12:57:52 +0530
Subject: [PATCH 3/3] Revert "fix #121: add systematic logging for store
 operations"

This reverts commit 7d011002b6dd1575006d20e2ba13781f76e92bdf.
---
 cmd/cli/store/store.go | 38 +++++---------------------------------
 1 file changed, 5 insertions(+), 33 deletions(-)

diff --git a/cmd/cli/store/store.go b/cmd/cli/store/store.go
index 1468140..143d3bf 100644
--- a/cmd/cli/store/store.go
+++ b/cmd/cli/store/store.go
@@ -110,13 +110,7 @@ func (s *StoreMethods) Save(LLMConfig LLMProvider) error {
 		return err
 	}
 
-	err = os.WriteFile(configPath, data, 0600)
-	if err != nil {
-		return err
-	}
-
-	fmt.Printf("LLM provider %s saved successfully\n", LLMConfig.LLM.String())
-	return nil
+	return os.WriteFile(configPath, data, 0600)
 }
 
 // DefaultLLMKey returns the currently selected default LLM provider, if any.
@@ -246,13 +240,7 @@ func ChangeDefault(Model types.LLMProvider) error {
 		return err
 	}
 
-	err = os.WriteFile(configPath, data, 0600)
-	if err != nil {
-		return err
-	}
-
-	fmt.Printf("%s set as default\n", Model.String())
-	return nil
+	return os.WriteFile(configPath, data, 0600)
 }
 
 // DeleteModel removes the specified provider from the saved configuration.
@@ -292,12 +280,7 @@ func (s *StoreMethods) DeleteModel(Model types.LLMProvider) error {
 		if err != nil {
 			return err
 		}
-		err = os.WriteFile(configPath, []byte("{}"), 0600)
-		if err != nil {
-			return err
-		}
-		fmt.Printf("%s model deleted\n", Model.String())
-		return nil
+		return os.WriteFile(configPath, []byte("{}"), 0600)
 	}
 
 } else {
@@ -318,12 +301,7 @@ func (s *StoreMethods) DeleteModel(Model types.LLMProvider) error {
 		if err != nil {
 			return err
 		}
-		err = os.WriteFile(configPath, data, 0600)
-		if err != nil {
-			return err
-		}
-		fmt.Printf("%s model deleted\n", Model.String())
-		return nil
+		return os.WriteFile(configPath, data, 0600)
 	}
 }
 
@@ -379,12 +357,6 @@ func (s *StoreMethods) UpdateAPIKey(Model types.LLMProvider, APIKey string) erro
 		return err
 	}
 
-	err = os.WriteFile(configPath, data, 0600)
-	if err != nil {
-		return err
-	}
-
-	fmt.Printf("API key for %s updated successfully\n", Model.String())
-	return nil
+	return os.WriteFile(configPath, data, 0600)
 }
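The pattern PATCH 1 introduces (and PATCH 3 later reverts) is write-then-confirm:
perform the os.WriteFile, return early on any error, and print the fmt.Printf
confirmation only once the write has actually succeeded. A minimal,
self-contained sketch of that pattern follows; saveWithFeedback and the
config.json path are illustrative names, not code from the repository.

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// saveWithFeedback is a hypothetical helper mirroring the PATCH 1 pattern:
// marshal the state, write it with 0600 permissions, and only print a
// confirmation after the write has completed without error.
func saveWithFeedback(configPath, label string, state map[string]string) error {
	data, err := json.MarshalIndent(state, "", "  ")
	if err != nil {
		return err
	}
	if err := os.WriteFile(configPath, data, 0600); err != nil {
		return err
	}
	// The success message sits below every error return, so a failed
	// write can never produce a confirmation.
	fmt.Printf("%s saved successfully\n", label)
	return nil
}

func main() {
	state := map[string]string{"default": "claude"}
	if err := saveWithFeedback("config.json", "LLM provider claude", state); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}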
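PATCH 2 applies one recipe to all six providers: hoist every literal (model
name, token limit, endpoint URL, header value) into a named const block, then
reference the constants at the call sites. A sketch of the same recipe against
a hypothetical provider; every name and the URL below are invented for
illustration and do not come from the repository.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// Named constants replace the magic values that would otherwise be
// scattered through the request-building code below.
const (
	exampleModel        = "example-model-v1"
	exampleMaxTokens    = 200
	exampleAPIEndpoint  = "https://api.example.com/v1/chat/completions"
	exampleContentType  = "application/json"
	authorizationPrefix = "Bearer "
)

// newChatRequest builds a chat-completion request using only the named
// constants, matching the PATCH 2 style.
func newChatRequest(apiKey, prompt string) (*http.Request, error) {
	payload := map[string]interface{}{
		"model":      exampleModel,
		"max_tokens": exampleMaxTokens,
		"messages":   []map[string]string{{"role": "user", "content": prompt}},
	}
	body, err := json.Marshal(payload)
	if err != nil {
		return nil, err
	}
	req, err := http.NewRequest("POST", exampleAPIEndpoint, bytes.NewBuffer(body))
	if err != nil {
		return nil, err
	}
	// Every header value comes from a constant, so changing an endpoint
	// or header touches exactly one line.
	req.Header.Set("Content-Type", exampleContentType)
	req.Header.Set("Authorization", fmt.Sprintf("%s%s", authorizationPrefix, apiKey))
	return req, nil
}

func main() {
	req, err := newChatRequest("sk-test", "write a commit message")
	if err != nil {
		panic(err)
	}
	fmt.Println(req.Method, req.URL)
}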
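The groq.go hunk also documents an escape hatch: the defaultModel constant can
be overridden via the GROQ_MODEL environment variable if Groq changes its
defaults again. The lookup itself is not shown in the diff; a sketch of how
such an override is typically resolved, with resolveModel as a hypothetical
name:

package main

import (
	"fmt"
	"os"
)

// defaultModel mirrors the constant in internal/groq; resolveModel is a
// guess at the GROQ_MODEL override mentioned in the patch comment, not
// code from the repository.
const defaultModel = "llama-3.3-70b-versatile"

func resolveModel() string {
	if override := os.Getenv("GROQ_MODEL"); override != "" {
		return override
	}
	return defaultModel
}

func main() {
	// With GROQ_MODEL unset this prints the default; exporting
	// GROQ_MODEL selects the override instead.
	fmt.Println(resolveModel())
}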