Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ Improving documentation is always appreciated:
- Git
- API key for either:
- Google Gemini (`GEMINI_API_KEY`)
- Groq (`GROQ_API_KEY`)
- Grok (`GROK_API_KEY`)
- Claude (`CLAUDE_API_KEY`)

Expand All @@ -89,9 +90,11 @@ Improving documentation is always appreciated:
1. Set up your environment variables:

```bash
export COMMIT_LLM=gemini # or "grok"
export COMMIT_LLM=gemini # or "groq" / "grok"
export GEMINI_API_KEY=your-api-key-here
# OR
export GROQ_API_KEY=your-api-key-here
# OR
export GROK_API_KEY=your-api-key-here
```

Expand Down
24 changes: 16 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ Looking to contribute? Check out:
## Features

✨ **AI-Powered Commit Messages** - Automatically generate meaningful commit messages
🔄 **Multiple LLM Support** - Choose between Google Gemini, Grok, Claude or ChatGPT
🔄 **Multiple LLM Support** - Choose between Google Gemini, Groq, Grok, Claude or ChatGPT
📝 **Context-Aware** - Analyzes staged and unstaged changes
📋 **Auto-Copy to Clipboard** - Generated messages are automatically copied for instant use
📊 **File Statistics Display** - Visual preview of changed files and line counts
Expand All @@ -41,13 +41,14 @@ You can use **Google Gemini**, **Grok**, **Claude**, or **ChatGPT** as the LLM t

### Environment Variables

| Variable | Values | Description |
| :--- | :--- | :--- |
| `COMMIT_LLM` | `gemini`, `grok`, `claude`, or `chatgpt` | Choose your LLM provider |
| `GEMINI_API_KEY` | Your API key | Required if using Gemini |
| `GROK_API_KEY` | Your API key | Required if using Grok |
| `CLAUDE_API_KEY` | Your API key | Required if using Claude |
| `OPENAI_API_KEY` | Your API key | Required if using ChatGPT |
| Variable | Values | Description |
| :--------------- | :----------------------------------------------- | :------------------------ |
| `COMMIT_LLM` | `gemini`, `groq`, `grok`, `claude`, or `chatgpt` | Choose your LLM provider |
| `GEMINI_API_KEY` | Your API key | Required if using Gemini |
| `GROQ_API_KEY` | Your API key | Required if using Groq |
| `GROK_API_KEY` | Your API key | Required if using Grok |
| `CLAUDE_API_KEY` | Your API key | Required if using Claude |
| `OPENAI_API_KEY` | Your API key | Required if using ChatGPT |

---

Expand Down Expand Up @@ -173,6 +174,13 @@ commit .
2. Generate an API key
3. Set the `GROK_API_KEY` environment variable

**Groq:**

1. Sign up at [GroqCloud](https://console.groq.com/)
2. Create an API key
3. Set the `GROQ_API_KEY` environment variable
4. _(Optional)_ Set `GROQ_MODEL` or `GROQ_API_URL` to override defaults

**Claude (Anthropic):**

1. Visit the [Anthropic Console](https://console.anthropic.com/)
Expand Down
8 changes: 8 additions & 0 deletions cmd/commit-msg/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ import (
"github.com/dfanso/commit-msg/internal/gemini"
"github.com/dfanso/commit-msg/internal/git"
"github.com/dfanso/commit-msg/internal/grok"
"github.com/dfanso/commit-msg/internal/groq"
"github.com/dfanso/commit-msg/internal/ollama"
"github.com/dfanso/commit-msg/internal/stats"
"github.com/dfanso/commit-msg/pkg/types"
Expand Down Expand Up @@ -40,6 +41,11 @@ func main() {
if apiKey == "" {
log.Fatalf("GROK_API_KEY is not set")
}
case "groq":
apiKey = os.Getenv("GROQ_API_KEY")
if apiKey == "" {
log.Fatalf("GROQ_API_KEY is not set")
}
case "chatgpt":
apiKey = os.Getenv("OPENAI_API_KEY")
if apiKey == "" {
Expand Down Expand Up @@ -139,6 +145,8 @@ func main() {
model = "llama3:latest"
}
commitMsg, err = ollama.GenerateCommitMessage(config, changes, url, model)
case "groq":
commitMsg, err = groq.GenerateCommitMessage(config, changes, apiKey)
default:
commitMsg, err = grok.GenerateCommitMessage(config, changes, apiKey)
}
Expand Down
111 changes: 111 additions & 0 deletions internal/groq/groq.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,111 @@
package groq

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"os"
	"time"

	"github.com/dfanso/commit-msg/pkg/types"
)

// chatMessage is a single message in an OpenAI-style chat exchange.
type chatMessage struct {
	Role    string `json:"role"` // "system", "user", or "assistant"
	Content string `json:"content"`
}

// chatRequest is the JSON payload sent to Groq's OpenAI-compatible
// chat completions endpoint.
type chatRequest struct {
	Model       string        `json:"model"`
	Messages    []chatMessage `json:"messages"`
	Temperature float64       `json:"temperature"`
	MaxTokens   int           `json:"max_tokens"`
}

// chatChoice wraps one candidate completion returned by the API.
type chatChoice struct {
	Message chatMessage `json:"message"`
}

// chatResponse is the subset of the Groq response body this package decodes;
// other fields in the API response are ignored.
type chatResponse struct {
	Choices []chatChoice `json:"choices"`
}

// defaultModel uses Groq's recommended general-purpose model as of Oct 2025.
// If Groq updates their defaults again, override via GROQ_MODEL.
const defaultModel = "llama-3.3-70b-versatile"

var (
	// baseURL and httpClient are package-level variables (not constants) so
	// tests can point them at an httptest server; GROQ_API_URL can also
	// override the endpoint at runtime.
	baseURL = "https://api.groq.com/openai/v1/chat/completions"
	// 30s cap prevents a hung request from blocking the CLI indefinitely.
	httpClient = &http.Client{Timeout: 30 * time.Second}
)

// GenerateCommitMessage calls Groq's OpenAI-compatible chat completions API
// and returns the generated commit message text.
//
// The config parameter is currently unused. The model defaults to
// defaultModel and may be overridden via the GROQ_MODEL environment
// variable; the endpoint defaults to baseURL and may be overridden via
// GROQ_API_URL. An error is returned when changes or apiKey is empty, the
// request cannot be built or sent, the API responds with a non-200 status,
// or the response contains no usable completion.
func GenerateCommitMessage(_ *types.Config, changes string, apiKey string) (string, error) {
	if changes == "" {
		return "", errors.New("no changes provided for commit message generation")
	}
	if apiKey == "" {
		return "", errors.New("groq API key is empty")
	}

	model := os.Getenv("GROQ_MODEL")
	if model == "" {
		model = defaultModel
	}

	// Low temperature keeps output deterministic; 200 tokens is plenty for a
	// commit subject plus a short body.
	payload := chatRequest{
		Model:       model,
		Temperature: 0.2,
		MaxTokens:   200,
		Messages: []chatMessage{
			{Role: "system", Content: "You are an assistant that writes clear, concise git commit messages."},
			{Role: "user", Content: fmt.Sprintf("%s\n\n%s", types.CommitPrompt, changes)},
		},
	}

	body, err := json.Marshal(payload)
	if err != nil {
		return "", fmt.Errorf("failed to marshal Groq request: %w", err)
	}

	endpoint := baseURL
	if customEndpoint := os.Getenv("GROQ_API_URL"); customEndpoint != "" {
		endpoint = customEndpoint
	}

	req, err := http.NewRequest(http.MethodPost, endpoint, bytes.NewReader(body))
	if err != nil {
		return "", fmt.Errorf("failed to create Groq request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+apiKey)

	resp, err := httpClient.Do(req)
	if err != nil {
		return "", fmt.Errorf("failed to call Groq API: %w", err)
	}
	defer resp.Body.Close()

	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return "", fmt.Errorf("failed to read Groq response: %w", err)
	}

	// Include the raw body on non-200: Groq returns a JSON error object that
	// is far more useful than the status code alone.
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("groq API returned status %d: %s", resp.StatusCode, string(responseBody))
	}

	var completion chatResponse
	if err := json.Unmarshal(responseBody, &completion); err != nil {
		return "", fmt.Errorf("failed to decode Groq response: %w", err)
	}

	if len(completion.Choices) == 0 || completion.Choices[0].Message.Content == "" {
		return "", errors.New("groq API returned empty response")
	}

	return completion.Choices[0].Message.Content, nil
}
106 changes: 106 additions & 0 deletions internal/groq/groq_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
package groq

import (
"encoding/json"
"net/http"
"net/http/httptest"
"testing"

"github.com/dfanso/commit-msg/pkg/types"
)

// capturedRequest mirrors chatRequest so the stub server can decode and
// inspect the exact payload the client sends over the wire.
type capturedRequest struct {
	Model       string        `json:"model"`
	Messages    []chatMessage `json:"messages"`
	Temperature float64       `json:"temperature"`
	MaxTokens   int           `json:"max_tokens"`
}

// withTestServer starts an httptest server backed by handler, redirects the
// package's endpoint and HTTP client to it, runs fn, and restores everything
// via t.Cleanup when the test finishes.
func withTestServer(t *testing.T, handler http.HandlerFunc, fn func()) {
	t.Helper()

	// Neutralize env overrides so they cannot redirect requests away from
	// the stub server.
	t.Setenv("GROQ_API_URL", "")
	t.Setenv("GROQ_MODEL", "")

	srv := httptest.NewServer(handler)
	t.Cleanup(srv.Close)

	origURL, origClient := baseURL, httpClient
	t.Cleanup(func() {
		baseURL = origURL
		httpClient = origClient
	})

	baseURL = srv.URL
	httpClient = srv.Client()

	fn()
}

// TestGenerateCommitMessageSuccess exercises the happy path: the handler
// validates the outgoing request (method, auth header, model, message count)
// and returns a canned completion, which must be passed through verbatim.
func TestGenerateCommitMessageSuccess(t *testing.T) {
	withTestServer(t, func(w http.ResponseWriter, r *http.Request) {
		if r.Method != http.MethodPost {
			t.Fatalf("unexpected method: %s", r.Method)
		}

		// The client must send the key as a Bearer token.
		if got := r.Header.Get("Authorization"); got != "Bearer test-key" {
			t.Fatalf("unexpected authorization header: %s", got)
		}

		var payload capturedRequest
		if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
			t.Fatalf("failed to decode request: %v", err)
		}

		// GROQ_MODEL is cleared by withTestServer, so the default applies.
		if payload.Model != "llama-3.3-70b-versatile" {
			t.Fatalf("unexpected model: %s", payload.Model)
		}

		// Expect exactly the system prompt plus the user diff message.
		if len(payload.Messages) != 2 {
			t.Fatalf("expected 2 messages, got %d", len(payload.Messages))
		}

		resp := chatResponse{
			Choices: []chatChoice{
				{Message: chatMessage{Role: "assistant", Content: "Feat: add groq provider"}},
			},
		}

		w.Header().Set("Content-Type", "application/json")
		if err := json.NewEncoder(w).Encode(resp); err != nil {
			t.Fatalf("failed to write response: %v", err)
		}
	}, func() {
		msg, err := GenerateCommitMessage(&types.Config{}, "diff", "test-key")
		if err != nil {
			t.Fatalf("GenerateCommitMessage returned error: %v", err)
		}

		expected := "Feat: add groq provider"
		if msg != expected {
			t.Fatalf("expected %q, got %q", expected, msg)
		}
	})
}

// TestGenerateCommitMessageNonOK verifies that a non-200 response from the
// API surfaces as an error to the caller.
func TestGenerateCommitMessageNonOK(t *testing.T) {
	failingHandler := func(w http.ResponseWriter, r *http.Request) {
		http.Error(w, `{"error":"bad things"}`, http.StatusBadGateway)
	}

	withTestServer(t, failingHandler, func() {
		if _, err := GenerateCommitMessage(&types.Config{}, "changes", "key"); err == nil {
			t.Fatal("expected error but got nil")
		}
	})
}

// TestGenerateCommitMessageEmptyChanges verifies that empty input is
// rejected up front, before any network request is attempted.
func TestGenerateCommitMessageEmptyChanges(t *testing.T) {
	// Clear env overrides so a stray environment cannot influence the call.
	t.Setenv("GROQ_MODEL", "")
	t.Setenv("GROQ_API_URL", "")

	_, err := GenerateCommitMessage(&types.Config{}, "", "key")
	if err == nil {
		t.Fatal("expected error for empty changes")
	}
}
Loading