Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion internal/chatgpt/chatgpt.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,10 @@ import (
"github.com/dfanso/commit-msg/pkg/types"
)

// chatgptModel pins the OpenAI chat model used for commit-message generation.
const chatgptModel = openai.ChatModelGPT4o

// GenerateCommitMessage calls OpenAI's chat completions API to turn the provided
// repository changes into a polished git commit message.
func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
Expand All @@ -22,7 +26,7 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
Messages: []openai.ChatCompletionMessageParamUnion{
openai.UserMessage(prompt),
},
Model: openai.ChatModelGPT4o,
Model: chatgptModel,
})
if err != nil {
return "", fmt.Errorf("OpenAI error: %w", err)
Expand Down
22 changes: 16 additions & 6 deletions internal/claude/claude.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,16 @@ import (
"github.com/dfanso/commit-msg/pkg/types"
)

// Model configuration for Anthropic's Claude messages API.
const (
	// claudeModel selects the Claude model invoked for every request.
	claudeModel = "claude-3-5-sonnet-20241022"
	// claudeMaxTokens caps response length; commit messages are short.
	claudeMaxTokens = 200
)

// HTTP constants for talking to the Anthropic messages endpoint.
const (
	claudeAPIEndpoint      = "https://api.anthropic.com/v1/messages"
	claudeAPIVersion       = "2023-06-01"
	contentTypeJSON        = "application/json"
	anthropicVersionHeader = "anthropic-version"
	xAPIKeyHeader          = "x-api-key"
)

// ClaudeRequest describes the payload sent to Anthropic's Claude messages API.
type ClaudeRequest struct {
Model string `json:"model"`
Expand All @@ -34,8 +44,8 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
prompt := types.BuildCommitPrompt(changes, opts)

reqBody := ClaudeRequest{
Model: "claude-3-5-sonnet-20241022",
MaxTokens: 200,
Model: claudeModel,
MaxTokens: claudeMaxTokens,
Messages: []types.Message{
{
Role: "user",
Expand All @@ -50,14 +60,14 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
}

ctx := context.Background()
req, err := http.NewRequestWithContext(ctx, "POST", "https://api.anthropic.com/v1/messages", bytes.NewBuffer(jsonData))
req, err := http.NewRequestWithContext(ctx, "POST", claudeAPIEndpoint, bytes.NewBuffer(jsonData))
if err != nil {
return "", err
}

req.Header.Set("Content-Type", "application/json")
req.Header.Set("x-api-key", apiKey)
req.Header.Set("anthropic-version", "2023-06-01")
req.Header.Set("Content-Type", contentTypeJSON)
req.Header.Set(xAPIKeyHeader, apiKey)
req.Header.Set(anthropicVersionHeader, claudeAPIVersion)

client := httpClient.GetClient()
resp, err := client.Do(req)
Expand Down
9 changes: 7 additions & 2 deletions internal/gemini/gemini.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,11 @@ import (
"github.com/dfanso/commit-msg/pkg/types"
)

// geminiModel is the Gemini model asked to draft commit messages.
const geminiModel = "gemini-2.0-flash"

// geminiTemperature keeps sampling low so responses stay focused.
const geminiTemperature = 0.2

// GenerateCommitMessage asks Google Gemini to author a commit message for the
// supplied repository changes and optional style instructions.
func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
Expand All @@ -25,8 +30,8 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
defer client.Close()

// Create a GenerativeModel with appropriate settings
model := client.GenerativeModel("gemini-2.0-flash")
model.SetTemperature(0.2) // Lower temperature for more focused responses
model := client.GenerativeModel(geminiModel)
model.SetTemperature(geminiTemperature) // Lower temperature for more focused responses

// Generate content using the prompt
resp, err := model.GenerateContent(ctx, genai.Text(prompt))
Expand Down
18 changes: 13 additions & 5 deletions internal/grok/grok.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,14 @@ import (
"github.com/dfanso/commit-msg/pkg/types"
)

// Model settings for X.AI's Grok chat-completions API.
const (
	grokModel       = "grok-3-mini-fast-beta"
	grokTemperature = 0 // zero temperature for deterministic commit messages
)

// HTTP request constants for the Grok endpoint.
const (
	grokAPIEndpoint     = "https://api.x.ai/v1/chat/completions"
	grokContentType     = "application/json"
	authorizationPrefix = "Bearer " // prepended to the API key in the Authorization header
)

// GenerateCommitMessage calls X.AI's Grok API to create a commit message from
// the provided Git diff and generation options.
func GenerateCommitMessage(config *types.Config, changes string, apiKey string, opts *types.GenerationOptions) (string, error) {
Expand All @@ -25,9 +33,9 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
Content: prompt,
},
},
Model: "grok-3-mini-fast-beta",
Model: grokModel,
Stream: false,
Temperature: 0,
Temperature: grokTemperature,
}

requestBody, err := json.Marshal(request)
Expand All @@ -36,14 +44,14 @@ func GenerateCommitMessage(config *types.Config, changes string, apiKey string,
}

// Create HTTP request
req, err := http.NewRequest("POST", "https://api.x.ai/v1/chat/completions", bytes.NewBuffer(requestBody))
req, err := http.NewRequest("POST", grokAPIEndpoint, bytes.NewBuffer(requestBody))
if err != nil {
return "", err
}

// Set headers
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey))
req.Header.Set("Content-Type", grokContentType)
req.Header.Set("Authorization", fmt.Sprintf("%s%s", authorizationPrefix, apiKey))

client := httpClient.GetClient()
resp, err := client.Do(req)
Expand Down
18 changes: 13 additions & 5 deletions internal/groq/groq.go
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,14 @@ type chatResponse struct {
// If Groq updates their defaults again, override via GROQ_MODEL.
const defaultModel = "llama-3.3-70b-versatile"

// Request tuning for the Groq chat-completions call.
const (
	// groqTemperature keeps randomness low so messages stay focused.
	groqTemperature = 0.2
	// groqMaxTokens bounds the reply; commit messages are short.
	groqMaxTokens = 200
	// groqSystemMessage primes the model for commit-message style output.
	groqSystemMessage = "You are an assistant that writes clear, concise git commit messages."
)

// HTTP header values for the Groq request.
const (
	groqContentType         = "application/json"
	groqAuthorizationPrefix = "Bearer "
)

var (
// allow overrides in tests
baseURL = "https://api.groq.com/openai/v1/chat/completions"
Expand All @@ -62,10 +70,10 @@ func GenerateCommitMessage(_ *types.Config, changes string, apiKey string, opts

payload := chatRequest{
Model: model,
Temperature: 0.2,
MaxTokens: 200,
Temperature: groqTemperature,
MaxTokens: groqMaxTokens,
Messages: []chatMessage{
{Role: "system", Content: "You are an assistant that writes clear, concise git commit messages."},
{Role: "system", Content: groqSystemMessage},
{Role: "user", Content: prompt},
},
}
Expand All @@ -85,8 +93,8 @@ func GenerateCommitMessage(_ *types.Config, changes string, apiKey string, opts
return "", fmt.Errorf("failed to create Groq request: %w", err)
}

req.Header.Set("Content-Type", "application/json")
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", apiKey))
req.Header.Set("Content-Type", groqContentType)
req.Header.Set("Authorization", fmt.Sprintf("%s%s", groqAuthorizationPrefix, apiKey))

resp, err := httpClient.Do(req)
if err != nil {
Expand Down
12 changes: 9 additions & 3 deletions internal/ollama/ollama.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,12 @@ import (
"github.com/dfanso/commit-msg/pkg/types"
)

// ollamaDefaultModel is used when the caller does not specify a model.
const ollamaDefaultModel = "llama3:latest"

// ollamaStream disables streaming so the full reply arrives as one JSON body.
const ollamaStream = false

// ollamaContentType is the MIME type for the request payload.
const ollamaContentType = "application/json"

// OllamaRequest captures the prompt payload sent to an Ollama HTTP endpoint.
type OllamaRequest struct {
Model string `json:"model"`
Expand All @@ -28,7 +34,7 @@ type OllamaResponse struct {
func GenerateCommitMessage(_ *types.Config, changes string, url string, model string, opts *types.GenerationOptions) (string, error) {
// Use llama3:latest as the default model
if model == "" {
model = "llama3:latest"
model = ollamaDefaultModel
}

// Preparing the prompt
Expand All @@ -38,7 +44,7 @@ func GenerateCommitMessage(_ *types.Config, changes string, url string, model st
reqBody := map[string]interface{}{
"model": model,
"prompt": prompt,
"stream": false,
"stream": ollamaStream,
}

// Generating the body
Expand All @@ -51,7 +57,7 @@ func GenerateCommitMessage(_ *types.Config, changes string, url string, model st
if err != nil {
return "", fmt.Errorf("failed to create request: %v", err)
}
req.Header.Set("Content-Type", "application/json")
req.Header.Set("Content-Type", ollamaContentType)

resp, err := httpClient.GetOllamaClient().Do(req)
if err != nil {
Expand Down
Loading