diff --git a/README.md b/README.md
index 6c27156..75d86ab 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ AI-powered Git commit message generator that analyzes your staged changes and ou
 ## Features
 
 - Generates 10 commit message suggestions from your staged diff
-- Providers: GitHub Copilot (default), OpenAI, OpenRouter
+- Providers: GitHub Copilot (default), OpenAI
 - Interactive config to pick provider/model and set keys
 - Simple output suitable for piping into TUI menus (one message per line)
@@ -68,25 +68,49 @@ providers:
   copilot:
     api_key: "$GITHUB_TOKEN" # Uses GitHub token; token is exchanged internally
     model: "gpt-4o" # or "openai/gpt-4o"; both accepted
+    # endpoint_url: "https://api.githubcopilot.com" # Optional - uses default if not specified
   openai:
     api_key: "$OPENAI_API_KEY"
     model: "gpt-4o"
-  openrouter:
-    api_key: "$OPENROUTER_API_KEY" # or a literal key
-    model: "openai/gpt-4o" # OpenRouter model IDs, e.g. anthropic/claude-3.5-sonnet
+    # endpoint_url: "https://api.openai.com/v1" # Optional - uses default if not specified
+  # Custom provider example (e.g., local Ollama):
+  # local:
+  #   api_key: "not-needed"
+  #   model: "llama3.1:8b"
+  #   endpoint_url: "http://localhost:11434/v1"
 ```
 
-Notes:
-- Copilot: requires a GitHub token with models scope. The tool can also discover IDE Copilot tokens, but models scope is recommended.
-- Environment variable references are supported by prefixing with `$` (e.g., `$OPENAI_API_KEY`).
+### Custom Endpoints
 
-### Configure via CLI
+You can configure custom API endpoints for any provider, which is useful for:
+- **Local AI models**: Ollama, LM Studio, or other local inference servers
+- **Enterprise proxies**: Internal API gateways or proxy servers
+- **Alternative providers**: Any OpenAI-compatible API endpoint
 
-```bash
-lazycommit config set # interactive provider/model/key picker
-lazycommit config get # show current provider/model
+The `endpoint_url` field is optional. If not specified, the official endpoint for that provider will be used.
+
+#### Examples
+
+**Ollama (local):**
+```yaml
+active_provider: openai # Use openai provider for Ollama compatibility
+providers:
+  openai:
+    api_key: "ollama" # Ollama doesn't require real API keys
+    model: "llama3.1:8b"
+    endpoint_url: "http://localhost:11434/v1"
 ```
 
 ## Integration with TUI Git clients
 
 Because `lazycommit commit` prints plain lines, it plugs nicely into menu UIs.
@@ -111,22 +135,6 @@ customCommands:
         labelFormat: "{{ .raw | green }}"
 ```
 
-Tips:
-- For `lazycommit commit`, you can omit `filter` and just use `valueFormat: "{{ .raw }}"` and `labelFormat: "{{ .raw | green }}"`.
-- If you pipe a numbered list tool (e.g., `bunx bunnai`), keep the regex groups `number` and `message` as shown.
-
-## Providers and models
-
-- Copilot (default when a GitHub token is available): uses `gpt-4o` unless overridden. Accepts `openai/gpt-4o` and normalizes it to `gpt-4o`.
-- OpenAI: choose from models defined in the interactive picker (e.g., gpt‑4o, gpt‑4.1, o3, o1, etc.).
-- OpenRouter: pick from OpenRouter-prefixed IDs (e.g., `openai/gpt-4o`, `anthropic/claude-3.5-sonnet`). Extra headers are set automatically.
-
-## How it works
-
-- Reads `git diff --cached`.
-- Sends a single prompt to the selected provider to generate 10 lines.
-- Prints the lines exactly, suitable for piping/selecting.
-
 ## Troubleshooting
 
 - "No staged changes to commit." — run `git add` first.
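Editor's note: the new "Examples" subsection promises LM Studio and enterprise proxies but only shows Ollama (the run of blank added lines above looks like where further examples were meant to go; it has been collapsed here). A companion snippet along these lines would fill that gap — the model name is illustrative, and LM Studio's local server listens on port 1234 by default:

```yaml
# LM Studio (local) - another OpenAI-compatible server; port 1234 by default
active_provider: openai
providers:
  openai:
    api_key: "lm-studio"         # placeholder; LM Studio ignores API keys
    model: "qwen2.5-7b-instruct" # illustrative; use whichever model is loaded
    endpoint_url: "http://localhost:1234/v1"
```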
diff --git a/cmd/commit.go b/cmd/commit.go
index 54b6cbc..2f8a6fc 100644
--- a/cmd/commit.go
+++ b/cmd/commit.go
@@ -56,14 +56,20 @@ var commitCmd = &cobra.Command{
 			}
 		}
 
+		endpoint, err := config.GetEndpoint()
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "Error getting endpoint: %v\n", err)
+			os.Exit(1)
+		}
+
 		switch providerName {
 		case "copilot":
-			aiProvider = provider.NewCopilotProviderWithModel(apiKey, model)
+			aiProvider = provider.NewCopilotProviderWithModel(apiKey, model, endpoint)
 		case "openai":
-			aiProvider = provider.NewOpenAIProvider(apiKey, model)
+			aiProvider = provider.NewOpenAIProvider(apiKey, model, endpoint)
 		default:
 			// Default to copilot if provider is not set or unknown
-			aiProvider = provider.NewCopilotProvider(apiKey)
+			aiProvider = provider.NewCopilotProvider(apiKey, endpoint)
 		}
 
 		commitMessages, err := aiProvider.GenerateCommitMessages(context.Background(), diff)
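For reviewers, the call-site contract after this change: the endpoint is resolved once in the command and threaded into whichever provider is selected, and an empty string is also safe at the constructor level since each constructor applies its own default. A minimal sketch of the new flow, as it would be called from inside the repo (the key and diff values are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/m7medvision/lazycommit/internal/config"
	"github.com/m7medvision/lazycommit/internal/provider"
)

func main() {
	// Resolve custom endpoint_url, or the provider default, per this patch.
	endpoint, err := config.GetEndpoint()
	if err != nil {
		log.Fatalf("resolve endpoint: %v", err)
	}

	// Passing "" instead of endpoint would also work: the constructor
	// falls back to https://api.openai.com/v1.
	p := provider.NewOpenAIProvider("sk-placeholder", "gpt-4o", endpoint)

	msgs, err := p.GenerateCommitMessages(context.Background(), "diff --git ...")
	if err != nil {
		log.Fatalf("generate: %v", err)
	}
	for _, m := range msgs {
		fmt.Println(m)
	}
}
```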
diff --git a/cmd/config.go b/cmd/config.go
index af56f14..58ae468 100644
--- a/cmd/config.go
+++ b/cmd/config.go
@@ -2,6 +2,7 @@ package cmd
 
 import (
 	"fmt"
+	"net/url"
 	"os"
 
 	"github.com/AlecAivazis/survey/v2"
@@ -26,8 +27,14 @@ var getCmd = &cobra.Command{
 			fmt.Println("Error getting model:", err)
 			os.Exit(1)
 		}
+		endpoint, err := config.GetEndpoint()
+		if err != nil {
+			fmt.Println("Error getting endpoint:", err)
+			os.Exit(1)
+		}
 		fmt.Printf("Active Provider: %s\n", provider)
 		fmt.Printf("Model: %s\n", model)
+		fmt.Printf("Endpoint: %s\n", endpoint)
 	},
 }
 
@@ -39,13 +46,40 @@ var setCmd = &cobra.Command{
 	},
 }
 
+func validateEndpointURL(val interface{}) error {
+	endpoint, ok := val.(string)
+	if !ok {
+		return fmt.Errorf("endpoint must be a string")
+	}
+
+	// Empty string is valid (uses default)
+	if endpoint == "" {
+		return nil
+	}
+
+	parsedURL, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("invalid URL format: %w", err)
+	}
+
+	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
+		return fmt.Errorf("endpoint must use http or https protocol")
+	}
+
+	if parsedURL.Host == "" {
+		return fmt.Errorf("endpoint must have a valid host")
+	}
+
+	return nil
+}
+
 func runInteractiveConfig() {
 	currentProvider := config.GetProvider()
 	currentModel, _ := config.GetModel()
 
 	providerPrompt := &survey.Select{
 		Message: "Choose a provider:",
-		Options: []string{"openai", "openrouter", "copilot"},
+		Options: []string{"openai", "copilot"},
 		Default: currentProvider,
 	}
 	var selectedProvider string
@@ -87,9 +121,8 @@ func runInteractiveConfig() {
 
 	// Dynamically generate available models for OpenAI
 	availableModels := map[string][]string{
-		"openai":     {},
-		"openrouter": {},
-		"copilot":    {"gpt-4o"}, // TODO: update if copilot models are dynamic
+		"openai":  {},
+		"copilot": {"gpt-4o"}, // TODO: update if copilot models are dynamic
 	}
 
 	modelDisplayToID := map[string]string{}
@@ -99,12 +132,6 @@ func runInteractiveConfig() {
 			availableModels["openai"] = append(availableModels["openai"], display)
 			modelDisplayToID[display] = string(id)
 		}
-	} else if selectedProvider == "openrouter" {
-		for id, m := range models.OpenRouterModels {
-			display := fmt.Sprintf("%s (%s)", m.Name, string(id))
-			availableModels["openrouter"] = append(availableModels["openrouter"], display)
-			modelDisplayToID[display] = string(id)
-		}
 	}
 
 	modelPrompt := &survey.Select{
@@ -115,7 +142,7 @@ func runInteractiveConfig() {
 	// Try to set the default to the current model if possible
 	isValidDefault := false
 	currentDisplay := ""
-	if selectedProvider == "openai" || selectedProvider == "openrouter" {
+	if selectedProvider == "openai" {
 		for display, id := range modelDisplayToID {
 			if id == currentModel || display == currentModel {
 				isValidDefault = true
@@ -144,7 +171,7 @@ func runInteractiveConfig() {
 	}
 
 	selectedModel := selectedDisplay
-	if selectedProvider == "openai" || selectedProvider == "openrouter" {
+	if selectedProvider == "openai" {
 		selectedModel = modelDisplayToID[selectedDisplay]
 	}
 
@@ -156,6 +183,33 @@ func runInteractiveConfig() {
 		}
 		fmt.Printf("Model set to: %s\n", selectedModel)
 	}
+
+	// Get current endpoint
+	currentEndpoint, _ := config.GetEndpoint()
+
+	// Endpoint configuration prompt
+	endpointPrompt := &survey.Input{
+		Message: "Enter custom endpoint URL (leave empty for default):",
+		Default: currentEndpoint,
+	}
+	var endpoint string
+	err = survey.AskOne(endpointPrompt, &endpoint, survey.WithValidator(validateEndpointURL))
+	if err != nil {
+		fmt.Println(err.Error())
+		return
+	}
+
+	// Only set endpoint if it's different from current
+	if endpoint != currentEndpoint && endpoint != "" {
+		err := config.SetEndpoint(selectedProvider, endpoint)
+		if err != nil {
+			fmt.Printf("Error setting endpoint: %v\n", err)
+			return
+		}
+		fmt.Printf("Endpoint set to: %s\n", endpoint)
+	} else if endpoint == "" {
+		fmt.Println("Using default endpoint for provider")
+	}
 }
 
 func init() {
diff --git a/internal/config/config.go b/internal/config/config.go
index 28b16b8..d1a6c97 100644
--- a/internal/config/config.go
+++ b/internal/config/config.go
@@ -3,6 +3,7 @@ package config
 import (
 	"encoding/json"
 	"fmt"
+	"net/url"
 	"os"
 	"path/filepath"
 	"runtime"
@@ -12,8 +13,9 @@ import (
 )
 
 type ProviderConfig struct {
-	APIKey string `mapstructure:"api_key"`
-	Model  string `mapstructure:"model"`
+	APIKey      string `mapstructure:"api_key"`
+	Model       string `mapstructure:"model"`
+	EndpointURL string `mapstructure:"endpoint_url"`
 }
 
 type Config struct {
@@ -124,6 +126,28 @@ func GetModel() (string, error) {
 	return providerConfig.Model, nil
 }
 
+func GetEndpoint() (string, error) {
+	providerConfig, err := GetActiveProviderConfig()
+	if err != nil {
+		return "", err
+	}
+
+	// If custom endpoint is configured, use it
+	if providerConfig.EndpointURL != "" {
+		return providerConfig.EndpointURL, nil
+	}
+
+	// Return default endpoints based on provider
+	switch cfg.ActiveProvider {
+	case "openai":
+		return "https://api.openai.com/v1", nil
+	case "copilot":
+		return "https://api.githubcopilot.com", nil
+	default:
+		return "", fmt.Errorf("no default endpoint available for provider '%s'", cfg.ActiveProvider)
+	}
+}
+
 func SetProvider(provider string) error {
 	if cfg == nil {
 		InitConfig()
@@ -150,6 +174,41 @@ func SetAPIKey(provider, apiKey string) error {
 	return viper.WriteConfig()
 }
 
+func validateEndpointURL(endpoint string) error {
+	if endpoint == "" {
+		return nil // Empty endpoint is valid (will use default)
+	}
+
+	parsedURL, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("invalid URL format: %w", err)
+	}
+
+	if parsedURL.Scheme != "http" && parsedURL.Scheme != "https" {
+		return fmt.Errorf("endpoint must use http or https protocol")
+	}
+
+	if parsedURL.Host == "" {
+		return fmt.Errorf("endpoint must have a valid host")
+	}
+
+	return nil
+}
+
+func SetEndpoint(provider, endpoint string) error {
+	if cfg == nil {
+		InitConfig()
+	}
+
+	// Validate endpoint URL
+	if err := validateEndpointURL(endpoint); err != nil {
+		return err
+	}
+
+	viper.Set(fmt.Sprintf("providers.%s.endpoint_url", provider), endpoint)
+	return viper.WriteConfig()
+}
+
 func LoadGitHubToken() (string, error) {
 	if token := os.Getenv("GITHUB_TOKEN"); token != "" {
 		return token, nil
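One review note: `validateEndpointURL` now exists twice, once survey-shaped in `cmd/config.go` and once string-based in `internal/config`, differing only in the `interface{}` unwrapping. A thin adapter could share the logic. A sketch, not part of this patch — it assumes the config-side helper were exported as `ValidateEndpointURL`, which the patch does not do:

```go
package cmd

import (
	"fmt"

	"github.com/m7medvision/lazycommit/internal/config"
)

// Hypothetical adapter: wrap the single string-based validator so it
// satisfies survey's validator signature, removing the duplicate checks.
func surveyEndpointValidator(val interface{}) error {
	endpoint, ok := val.(string)
	if !ok {
		return fmt.Errorf("endpoint must be a string")
	}
	return config.ValidateEndpointURL(endpoint) // assumes exported helper
}
```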
diff --git a/internal/config/config_test.go b/internal/config/config_test.go
index 3512f2e..5cc80e9 100644
--- a/internal/config/config_test.go
+++ b/internal/config/config_test.go
@@ -52,7 +52,7 @@ func TestGetAPIKey_EnvironmentVariableNotSet(t *testing.T) {
 	InitConfig()
 
 	// Set up test provider with environment variable reference that doesn't exist
-	testProvider := "openrouter"
+	testProvider := "openai"
 	cfg.ActiveProvider = testProvider
 	if cfg.Providers == nil {
 		cfg.Providers = make(map[string]ProviderConfig)
@@ -68,7 +68,7 @@ func TestGetAPIKey_EnvironmentVariableNotSet(t *testing.T) {
 		t.Fatal("Expected error for missing environment variable, got nil")
 	}
 
-	expectedError := "environment variable 'NONEXISTENT_API_KEY' for provider 'openrouter' is not set or empty"
+	expectedError := "environment variable 'NONEXISTENT_API_KEY' for provider 'openai' is not set or empty"
 	if err.Error() != expectedError {
 		t.Errorf("Expected error message '%s', got '%s'", expectedError, err.Error())
 	}
@@ -83,7 +83,7 @@ func TestGetAPIKey_RegularAPIKey(t *testing.T) {
 	InitConfig()
 
 	// Set up test provider with regular API key (not environment variable)
-	testProvider := "openrouter"
+	testProvider := "openai"
 	testAPIKey := "regular-api-key-123"
 	cfg.ActiveProvider = testProvider
 	if cfg.Providers == nil {
@@ -103,4 +103,138 @@ func TestGetAPIKey_RegularAPIKey(t *testing.T) {
 	if resolvedKey != testAPIKey {
 		t.Errorf("Expected API key to be %s, got %s", testAPIKey, resolvedKey)
 	}
-}
\ No newline at end of file
+}
+
+func TestGetEndpoint_DefaultEndpoints(t *testing.T) {
+	// Reset configuration for clean test
+	cfg = nil
+	viper.Reset()
+
+	// Test default endpoints for different providers
+	testCases := []struct {
+		provider string
+		expected string
+	}{
+		{"openai", "https://api.openai.com/v1"},
+		{"copilot", "https://api.githubcopilot.com"},
+	}
+
+	for _, tc := range testCases {
+		// Initialize config
+		InitConfig()
+
+		// Set up test provider without custom endpoint
+		cfg.ActiveProvider = tc.provider
+		if cfg.Providers == nil {
+			cfg.Providers = make(map[string]ProviderConfig)
+		}
+		cfg.Providers[tc.provider] = ProviderConfig{
+			APIKey: "test-key",
+			Model:  "test-model",
+			// No EndpointURL set - should use default
+		}
+
+		// Test that default endpoint is returned
+		endpoint, err := GetEndpoint()
+		if err != nil {
+			t.Fatalf("Expected no error for provider %s, got: %v", tc.provider, err)
+		}
+
+		if endpoint != tc.expected {
+			t.Errorf("Expected endpoint %s for provider %s, got %s", tc.expected, tc.provider, endpoint)
+		}
+	}
+}
+
+func TestGetEndpoint_CustomEndpoint(t *testing.T) {
+	// Reset configuration for clean test
+	cfg = nil
+	viper.Reset()
+
+	// Initialize config
+	InitConfig()
+
+	// Set up test provider with custom endpoint
+	testProvider := "openai"
+	customEndpoint := "https://custom.api.com/v1"
+	cfg.ActiveProvider = testProvider
+	if cfg.Providers == nil {
+		cfg.Providers = make(map[string]ProviderConfig)
+	}
+	cfg.Providers[testProvider] = ProviderConfig{
+		APIKey:      "test-key",
+		Model:       "test-model",
+		EndpointURL: customEndpoint,
+	}
+
+	// Test that custom endpoint is returned
+	endpoint, err := GetEndpoint()
+	if err != nil {
+		t.Fatalf("Expected no error, got: %v", err)
+	}
+
+	if endpoint != customEndpoint {
+		t.Errorf("Expected custom endpoint %s, got %s", customEndpoint, endpoint)
+	}
+}
+
+func TestGetEndpoint_UnknownProvider(t *testing.T) {
+	// Reset configuration for clean test
+	cfg = nil
+	viper.Reset()
+
+	// Initialize config
+	InitConfig()
+
+	// Set up unknown provider without custom endpoint
+	testProvider := "unknown-provider"
+	cfg.ActiveProvider = testProvider
+	if cfg.Providers == nil {
+		cfg.Providers = make(map[string]ProviderConfig)
+	}
+	cfg.Providers[testProvider] = ProviderConfig{
+		APIKey: "test-key",
+		Model:  "test-model",
+	}
+
+	// Test that unknown provider without custom endpoint returns error
+	_, err := GetEndpoint()
+	if err == nil {
+		t.Fatal("Expected error for unknown provider, got nil")
+	}
+
+	expectedError := "no default endpoint available for provider 'unknown-provider'"
+	if err.Error() != expectedError {
+		t.Errorf("Expected error message '%s', got '%s'", expectedError, err.Error())
+	}
+}
+
+func TestSetEndpoint_Validation(t *testing.T) {
+	// Reset configuration for clean test
+	cfg = nil
+	viper.Reset()
+
+	// Initialize config
+	InitConfig()
+
+	testCases := []struct {
+		endpoint string
+		valid    bool
+	}{
+		{"", true},                          // Empty should be valid (default)
+		{"https://api.openai.com/v1", true}, // Valid HTTPS URL
+		{"http://localhost:11434", true},    // Valid HTTP URL
+		{"ftp://invalid.com", false},        // Invalid protocol
+		{"not-a-url", false},                // Invalid format
+		{"https://", false},                 // Missing host
+	}
+
+	for _, tc := range testCases {
+		err := SetEndpoint("test", tc.endpoint)
+		if tc.valid && err != nil {
+			t.Errorf("Expected valid endpoint %s to pass, but got error: %v", tc.endpoint, err)
+		} else if !tc.valid && err == nil {
+			t.Errorf("Expected invalid endpoint %s to fail, but it passed", tc.endpoint)
+		}
+	}
+}
\ No newline at end of file
diff --git a/internal/provider/copilot.go b/internal/provider/copilot.go
index 6411560..3a309ca 100644
--- a/internal/provider/copilot.go
+++ b/internal/provider/copilot.go
@@ -16,22 +16,31 @@ import (
 type CopilotProvider struct {
 	apiKey     string
 	model      string
+	endpoint   string
 	httpClient *http.Client
 }
 
-func NewCopilotProvider(token string) *CopilotProvider {
+func NewCopilotProvider(token, endpoint string) *CopilotProvider {
+	if endpoint == "" {
+		endpoint = "https://api.githubcopilot.com"
+	}
 	return &CopilotProvider{
 		apiKey:     token,
 		model:      "gpt-4o",
+		endpoint:   endpoint,
 		httpClient: &http.Client{Timeout: 30 * time.Second},
 	}
 }
 
-func NewCopilotProviderWithModel(token, model string) *CopilotProvider {
+func NewCopilotProviderWithModel(token, model, endpoint string) *CopilotProvider {
 	m := normalizeCopilotModel(model)
+	if endpoint == "" {
+		endpoint = "https://api.githubcopilot.com"
+	}
 	return &CopilotProvider{
 		apiKey:     token,
 		model:      m,
+		endpoint:   endpoint,
 		httpClient: &http.Client{Timeout: 30 * time.Second},
 	}
 }
@@ -119,7 +128,7 @@ func (c *CopilotProvider) GenerateCommitMessages(ctx context.Context, diff string) ([]string, error) {
 	}
 
 	client := openai.NewClient(
-		option.WithBaseURL("https://api.githubcopilot.com"),
+		option.WithBaseURL(c.endpoint),
 		option.WithAPIKey(bearer),
 		option.WithHeader("Editor-Version", "lazycommit/1.0"),
 		option.WithHeader("Editor-Plugin-Version", "lazycommit/1.0"),
-	OpenRouterGPT4oMini      ModelID = "openrouter.gpt-4o-mini"
-	OpenRouterO1             ModelID = "openrouter.o1"
-	OpenRouterO1Pro          ModelID = "openrouter.o1-pro"
-	OpenRouterO1Mini         ModelID = "openrouter.o1-mini"
-	OpenRouterO3             ModelID = "openrouter.o3"
-	OpenRouterO3Mini         ModelID = "openrouter.o3-mini"
-	OpenRouterO4Mini         ModelID = "openrouter.o4-mini"
-	OpenRouterGemini25Flash  ModelID = "openrouter.gemini-2.5-flash"
-	OpenRouterGemini25       ModelID = "openrouter.gemini-2.5"
-	OpenRouterClaude35Sonnet ModelID = "openrouter.claude-3.5-sonnet"
-	OpenRouterClaude3Haiku   ModelID = "openrouter.claude-3-haiku"
-	OpenRouterClaude37Sonnet ModelID = "openrouter.claude-3.7-sonnet"
-	OpenRouterClaude35Haiku  ModelID = "openrouter.claude-3.5-haiku"
-	OpenRouterClaude3Opus    ModelID = "openrouter.claude-3-opus"
-	OpenRouterDeepSeekR1Free ModelID = "openrouter.deepseek-r1-free"
-)
-
-var OpenRouterModels = map[ModelID]Model{
-	OpenRouterGPT41: {
-		ID:                 OpenRouterGPT41,
-		Name:               "OpenRouter – GPT 4.1",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4.1",
-		CostPer1MIn:        OpenAIModels[GPT41].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41].DefaultMaxTokens,
-	},
-	OpenRouterGPT41Mini: {
-		ID:                 OpenRouterGPT41Mini,
-		Name:               "OpenRouter – GPT 4.1 mini",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4.1-mini",
-		CostPer1MIn:        OpenAIModels[GPT41Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41Mini].DefaultMaxTokens,
-	},
-	OpenRouterGPT41Nano: {
-		ID:                 OpenRouterGPT41Nano,
-		Name:               "OpenRouter – GPT 4.1 nano",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4.1-nano",
-		CostPer1MIn:        OpenAIModels[GPT41Nano].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT41Nano].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT41Nano].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT41Nano].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT41Nano].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT41Nano].DefaultMaxTokens,
-	},
-	OpenRouterGPT45Preview: {
-		ID:                 OpenRouterGPT45Preview,
-		Name:               "OpenRouter – GPT 4.5 preview",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4.5-preview",
-		CostPer1MIn:        OpenAIModels[GPT45Preview].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT45Preview].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT45Preview].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT45Preview].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT45Preview].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT45Preview].DefaultMaxTokens,
-	},
-	OpenRouterGPT4o: {
-		ID:                 OpenRouterGPT4o,
-		Name:               "OpenRouter – GPT 4o",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4o",
-		CostPer1MIn:        OpenAIModels[GPT4o].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT4o].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT4o].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT4o].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT4o].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[GPT4o].DefaultMaxTokens,
-	},
-	OpenRouterGPT4oMini: {
-		ID:                 OpenRouterGPT4oMini,
-		Name:               "OpenRouter – GPT 4o mini",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/gpt-4o-mini",
-		CostPer1MIn:        OpenAIModels[GPT4oMini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[GPT4oMini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[GPT4oMini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[GPT4oMini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[GPT4oMini].ContextWindow,
-	},
-	OpenRouterO1: {
-		ID:                 OpenRouterO1,
-		Name:               "OpenRouter – O1",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o1",
-		CostPer1MIn:        OpenAIModels[O1].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O1].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O1].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O1].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O1].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O1].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O1].CanReason,
-	},
-	OpenRouterO1Pro: {
-		ID:                 OpenRouterO1Pro,
-		Name:               "OpenRouter – o1 pro",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o1-pro",
-		CostPer1MIn:        OpenAIModels[O1Pro].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O1Pro].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O1Pro].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O1Pro].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O1Pro].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O1Pro].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O1Pro].CanReason,
-	},
-	OpenRouterO1Mini: {
-		ID:                 OpenRouterO1Mini,
-		Name:               "OpenRouter – o1 mini",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o1-mini",
-		CostPer1MIn:        OpenAIModels[O1Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O1Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O1Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O1Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O1Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O1Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O1Mini].CanReason,
-	},
-	OpenRouterO3: {
-		ID:                 OpenRouterO3,
-		Name:               "OpenRouter – o3",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o3",
-		CostPer1MIn:        OpenAIModels[O3].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O3].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O3].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O3].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O3].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O3].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O3].CanReason,
-	},
-	OpenRouterO3Mini: {
-		ID:                 OpenRouterO3Mini,
-		Name:               "OpenRouter – o3 mini",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o3-mini-high",
-		CostPer1MIn:        OpenAIModels[O3Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O3Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O3Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O3Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O3Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O3Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O3Mini].CanReason,
-	},
-	OpenRouterO4Mini: {
-		ID:                 OpenRouterO4Mini,
-		Name:               "OpenRouter – o4 mini",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "openai/o4-mini-high",
-		CostPer1MIn:        OpenAIModels[O4Mini].CostPer1MIn,
-		CostPer1MInCached:  OpenAIModels[O4Mini].CostPer1MInCached,
-		CostPer1MOut:       OpenAIModels[O4Mini].CostPer1MOut,
-		CostPer1MOutCached: OpenAIModels[O4Mini].CostPer1MOutCached,
-		ContextWindow:      OpenAIModels[O4Mini].ContextWindow,
-		DefaultMaxTokens:   OpenAIModels[O4Mini].DefaultMaxTokens,
-		CanReason:          OpenAIModels[O4Mini].CanReason,
-	},
-	// The following models reference GeminiModels and AnthropicModels, which are not defined in this codebase yet.
-	// TODO: Implement GeminiModels and AnthropicModels, then update these entries accordingly.
-	OpenRouterGemini25Flash: {
-		ID:                 OpenRouterGemini25Flash,
-		Name:               "OpenRouter – Gemini 2.5 Flash",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "google/gemini-2.5-flash-preview:thinking",
-		CostPer1MIn:        0, // TODO: GeminiModels[Gemini25Flash].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: GeminiModels[Gemini25Flash].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: GeminiModels[Gemini25Flash].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: GeminiModels[Gemini25Flash].CostPer1MOutCached
-		ContextWindow:      0, // TODO: GeminiModels[Gemini25Flash].ContextWindow
-		DefaultMaxTokens:   0, // TODO: GeminiModels[Gemini25Flash].DefaultMaxTokens
-	},
-	OpenRouterGemini25: {
-		ID:                 OpenRouterGemini25,
-		Name:               "OpenRouter – Gemini 2.5 Pro",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "google/gemini-2.5-pro-preview-03-25",
-		CostPer1MIn:        0, // TODO: GeminiModels[Gemini25].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: GeminiModels[Gemini25].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: GeminiModels[Gemini25].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: GeminiModels[Gemini25].CostPer1MOutCached
-		ContextWindow:      0, // TODO: GeminiModels[Gemini25].ContextWindow
-		DefaultMaxTokens:   0, // TODO: GeminiModels[Gemini25].DefaultMaxTokens
-	},
-	OpenRouterClaude35Sonnet: {
-		ID:                 OpenRouterClaude35Sonnet,
-		Name:               "OpenRouter – Claude 3.5 Sonnet",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "anthropic/claude-3.5-sonnet",
-		CostPer1MIn:        0, // TODO: AnthropicModels[Claude35Sonnet].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: AnthropicModels[Claude35Sonnet].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: AnthropicModels[Claude35Sonnet].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: AnthropicModels[Claude35Sonnet].CostPer1MOutCached
-		ContextWindow:      0, // TODO: AnthropicModels[Claude35Sonnet].ContextWindow
-		DefaultMaxTokens:   0, // TODO: AnthropicModels[Claude35Sonnet].DefaultMaxTokens
-	},
-	OpenRouterClaude3Haiku: {
-		ID:                 OpenRouterClaude3Haiku,
-		Name:               "OpenRouter – Claude 3 Haiku",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "anthropic/claude-3-haiku",
-		CostPer1MIn:        0, // TODO: AnthropicModels[Claude3Haiku].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: AnthropicModels[Claude3Haiku].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: AnthropicModels[Claude3Haiku].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: AnthropicModels[Claude3Haiku].CostPer1MOutCached
-		ContextWindow:      0, // TODO: AnthropicModels[Claude3Haiku].ContextWindow
-		DefaultMaxTokens:   0, // TODO: AnthropicModels[Claude3Haiku].DefaultMaxTokens
-	},
-	OpenRouterClaude37Sonnet: {
-		ID:                 OpenRouterClaude37Sonnet,
-		Name:               "OpenRouter – Claude 3.7 Sonnet",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "anthropic/claude-3.7-sonnet",
-		CostPer1MIn:        0, // TODO: AnthropicModels[Claude37Sonnet].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: AnthropicModels[Claude37Sonnet].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: AnthropicModels[Claude37Sonnet].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: AnthropicModels[Claude37Sonnet].CostPer1MOutCached
-		ContextWindow:      0, // TODO: AnthropicModels[Claude37Sonnet].ContextWindow
-		DefaultMaxTokens:   0, // TODO: AnthropicModels[Claude37Sonnet].DefaultMaxTokens
-		CanReason:          false, // TODO: AnthropicModels[Claude37Sonnet].CanReason
-	},
-	OpenRouterClaude35Haiku: {
-		ID:                 OpenRouterClaude35Haiku,
-		Name:               "OpenRouter – Claude 3.5 Haiku",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "anthropic/claude-3.5-haiku",
-		CostPer1MIn:        0, // TODO: AnthropicModels[Claude35Haiku].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: AnthropicModels[Claude35Haiku].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: AnthropicModels[Claude35Haiku].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: AnthropicModels[Claude35Haiku].CostPer1MOutCached
-		ContextWindow:      0, // TODO: AnthropicModels[Claude35Haiku].ContextWindow
-		DefaultMaxTokens:   0, // TODO: AnthropicModels[Claude35Haiku].DefaultMaxTokens
-	},
-	OpenRouterClaude3Opus: {
-		ID:                 OpenRouterClaude3Opus,
-		Name:               "OpenRouter – Claude 3 Opus",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "anthropic/claude-3-opus",
-		CostPer1MIn:        0, // TODO: AnthropicModels[Claude3Opus].CostPer1MIn
-		CostPer1MInCached:  0, // TODO: AnthropicModels[Claude3Opus].CostPer1MInCached
-		CostPer1MOut:       0, // TODO: AnthropicModels[Claude3Opus].CostPer1MOut
-		CostPer1MOutCached: 0, // TODO: AnthropicModels[Claude3Opus].CostPer1MOutCached
-		ContextWindow:      0, // TODO: AnthropicModels[Claude3Opus].ContextWindow
-		DefaultMaxTokens:   0, // TODO: AnthropicModels[Claude3Opus].DefaultMaxTokens
-	},
-	OpenRouterDeepSeekR1Free: {
-		ID:                 OpenRouterDeepSeekR1Free,
-		Name:               "OpenRouter – DeepSeek R1 Free",
-		Provider:           ProviderOpenRouter,
-		APIModel:           "deepseek/deepseek-r1-0528:free",
-		CostPer1MIn:        0,
-		CostPer1MInCached:  0,
-		CostPer1MOut:       0,
-		CostPer1MOutCached: 0,
-		ContextWindow:      163_840,
-		DefaultMaxTokens:   10000,
-	},
-}
diff --git a/internal/provider/openai.go b/internal/provider/openai.go
index 275375c..efd4fa2 100644
--- a/internal/provider/openai.go
+++ b/internal/provider/openai.go
@@ -12,11 +12,18 @@ type OpenAIProvider struct {
 	commonProvider
 }
 
-func NewOpenAIProvider(apiKey, model string) *OpenAIProvider {
+func NewOpenAIProvider(apiKey, model, endpoint string) *OpenAIProvider {
 	if model == "" {
 		model = "gpt-3.5-turbo"
 	}
+
+	// Set default endpoint if none provided
+	if endpoint == "" {
+		endpoint = "https://api.openai.com/v1"
+	}
+
 	client := openai.NewClient(
+		option.WithBaseURL(endpoint),
 		option.WithAPIKey(apiKey),
 	)
 	return &OpenAIProvider{
diff --git a/internal/provider/openrouter.go b/internal/provider/openrouter.go
deleted file mode 100644
index 2cea5ef..0000000
--- a/internal/provider/openrouter.go
+++ /dev/null
@@ -1,47 +0,0 @@
-package provider
-
-import (
-	"context"
-	"fmt"
-
-	"github.com/openai/openai-go"
-	"github.com/openai/openai-go/option"
-)
-
-type OpenRouterProvider struct {
-	commonProvider
-}
-
-func NewOpenRouterProvider(apiKey, model string) *OpenRouterProvider {
-	if model == "" {
-		model = "gpt-3.5-turbo"
-	}
-	client := openai.NewClient(
-		option.WithBaseURL("https://openrouter.ai/api/v1"),
-		option.WithAPIKey(apiKey),
-		option.WithHeaderAdd("HTTP-Referer", "https://github.com/m7medvision/lazycommit"),
-		option.WithHeaderAdd("User-Agent", "LazyCommit/1.0"),
-		option.WithHeaderAdd("X-Title", "LazyCommit"),
-	)
-	return &OpenRouterProvider{
-		commonProvider: commonProvider{
-			client: &client,
-			model:  model,
-		},
-	}
-}
-
-func (o *OpenRouterProvider) GenerateCommitMessage(ctx context.Context, diff string) (string, error) {
-	messages, err := o.generateCommitMessages(ctx, diff)
-	if err != nil {
-		return "", err
-	}
-	if len(messages) == 0 {
-		return "", fmt.Errorf("no commit messages generated")
-	}
-	return messages[0], nil
-}
-
-func (o *OpenRouterProvider) GenerateCommitMessages(ctx context.Context, diff string) ([]string, error) {
-	return o.generateCommitMessages(ctx, diff)
-}
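A migration note worth recording with this removal: the deleted provider drove OpenRouter through the same `openai-go` client, just with a different base URL and optional attribution headers (`HTTP-Referer`, `X-Title`), so former `openrouter` users can keep working via the `openai` provider plus a custom `endpoint_url`. A sketch as it would be called from inside the repo (model ID illustrative; the attribution headers are simply no longer sent):

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/m7medvision/lazycommit/internal/provider"
)

func main() {
	// OpenRouter speaks the OpenAI wire protocol, so the generic provider
	// reaches it through the endpoint added in this patch.
	p := provider.NewOpenAIProvider(
		os.Getenv("OPENROUTER_API_KEY"),
		"anthropic/claude-3.5-sonnet", // OpenRouter-style model ID
		"https://openrouter.ai/api/v1",
	)
	msgs, err := p.GenerateCommitMessages(context.Background(), "diff --git ...")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(msgs[0])
}
```

The equivalent config-file route is `active_provider: openai` with `endpoint_url: "https://openrouter.ai/api/v1"`, mirroring the Ollama example in the README.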