diff --git a/README.md b/README.md
index d5a3770..a66f607 100644
--- a/README.md
+++ b/README.md
@@ -50,24 +50,26 @@ Fun Fact: Using an AI to write commits and other automations can reduce the risk
 Before running AutoCommit, it's advisable to set a few environment variables 🔑:
 
-- `OPENAI_URL`: Override openai api eg: azure openai (Optional; Default: openai url)
-- `OPENAI_API_KEY`: The API key for the GPT-4 model (🚨 **Required**).
-- `OPENAI_MODEL`: Specify a different language model 🔄 (Optional; Default: `gpt-4`).
+- `AZURE_OPENAI_ENDPOINT`: The Azure OpenAI endpoint URL (🚨 **Required** for Azure OpenAI).
+- `AZURE_OPENAI_API_KEY`: The API key for Azure OpenAI (🚨 **Required** for Azure OpenAI).
+- `OPENAI_URL`: Override the OpenAI API endpoint, e.g. Azure OpenAI (Optional; Fallback for backward compatibility).
+- `OPENAI_API_KEY`: The OpenAI API key (Optional; Fallback for backward compatibility).
+- `OPENAI_MODEL`: Specify a different language model 🔄 (Optional; Default: `o4-mini`).
 - `FINE_TUNE_PARAMS`: Additional parameters for fine-tuning the model output ⚙️ (Optional; Default: `{}`).
 
 Add these environment variables by appending them to your `.bashrc`, `.zshrc`, or other shell configuration files 📄:
 
 ```bash
-export OPENAI_URL=https://apiendpoint.openai.azure.com
-export OPENAI_MODEL=llm-large
-export OPENAI_API_KEY=your-openai-api-key-here
+export AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com
+export AZURE_OPENAI_API_KEY=your-azure-openai-api-key-here
+export OPENAI_MODEL=o4-mini
 export FINE_TUNE_PARAMS='{"temperature": 0.7}'
 ```
 
 Or, you can set them inline before running the AutoCommit command 🖱️:
 
 ```bash
-OPENAI_URL=your-openai-api-key-here OPENAI_MODEL=gpt-4 FINE_TUNE_PARAMS='{"temperature": 0.7}' git auto-commit
+AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com AZURE_OPENAI_API_KEY=your-api-key OPENAI_MODEL=o4-mini git auto-commit
 ```
 
 ### Complete Install 📦
diff --git a/util.go b/util.go
index d634868..7704753 100644
--- a/util.go
+++ b/util.go
@@ -7,7 +7,6 @@ import (
 	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
 	"github.com/cli/go-gh/v2/pkg/api"
 	"github.com/joho/godotenv"
-	openai "github.com/sashabaranov/go-openai"
 	"math"
 	"os"
 	"os/exec"
@@ -108,36 +107,60 @@ func getChatCompletionResponse(messages []azopenai.ChatMessage) (string, error)
 	if err != nil {
 		fmt.Errorf(".env file not found: %v", err)
 	}
-	keyCredential, err := azopenai.NewKeyCredential(os.Getenv("OPENAI_API_KEY"))
+
+	// Try new environment variables first, fall back to old ones for compatibility
+	apiKey := os.Getenv("AZURE_OPENAI_API_KEY")
+	if apiKey == "" {
+		apiKey = os.Getenv("OPENAI_API_KEY")
+	}
+
+	endpoint := os.Getenv("AZURE_OPENAI_ENDPOINT")
+	if endpoint == "" {
+		endpoint = os.Getenv("OPENAI_URL")
+	}
+
+	deploymentName := os.Getenv("OPENAI_MODEL")
+	if deploymentName == "" {
+		deploymentName = "o4-mini"
+	}
+
+	// Set a reasonable token limit for the completion
+	maxTokens := int32(1000)
+
+	if apiKey == "" {
+		return "", fmt.Errorf("export AZURE_OPENAI_API_KEY= #execute this in your terminal and try again")
+	}
+
+	if endpoint == "" {
+		return "", fmt.Errorf("export AZURE_OPENAI_ENDPOINT= #execute this in your terminal and try again")
+	}
+
+	// Initialize the OpenAI client with API key-based authentication
+	keyCredential, err := azopenai.NewKeyCredential(apiKey)
 	if err != nil {
-		fmt.Errorf("export OPENAI_API_KEY= #execute this in your terminal and try again")
-		return "", fmt.Errorf("error creating Azure OpenAI client: %v", err)
+		return "", fmt.Errorf("error creating credential: %v", err)
 	}
-	url := os.Getenv("OPENAI_URL")
-	model := os.Getenv("OPENAI_MODEL")
+
 	var client *azopenai.Client
-	if strings.Contains(url, "azure") {
-		client, err = azopenai.NewClientWithKeyCredential(url, keyCredential, nil)
+	if strings.Contains(endpoint, "azure") {
+		client, err = azopenai.NewClientWithKeyCredential(endpoint, keyCredential, nil)
 		if err != nil {
 			return "", fmt.Errorf("error creating Azure OpenAI client: %v", err)
 		}
 	} else {
-		client, err = azopenai.NewClientForOpenAI(url, keyCredential, nil)
+		client, err = azopenai.NewClientForOpenAI(endpoint, keyCredential, nil)
 		if err != nil {
-			return "", fmt.Errorf("error creating Azure OpenAI client: %v", err)
+			return "", fmt.Errorf("error creating OpenAI client: %v", err)
 		}
-
-	}
-	if model == "" {
-		model = openai.GPT4
 	}
 	resp, err := client.GetChatCompletions(
-		context.Background(),
+		context.TODO(),
 		azopenai.ChatCompletionsOptions{
 			Messages: messages,
-			Deployment: model,
+			Deployment: deploymentName,
+			MaxTokens: &maxTokens,
 		},
 		nil,
 	)