Provider-agnostic Go client for LLM chat (with multimodal support) and embeddings.
go get github.com/MetaDiv-AI/llm

package main
import (
"context"
"fmt"
"os"
"github.com/MetaDiv-AI/llm"
"github.com/MetaDiv-AI/logger"
)
func main() {
log := logger.New().Development().Build()
defer log.Sync()
// OPENROUTER_API_KEY env var is used when WithAPIKey is omitted
client, err := llm.NewClient(llm.ProviderOpenRouter,
llm.WithAPIKey(os.Getenv("OPENROUTER_API_KEY")),
llm.WithLogger(log),
)
if err != nil {
panic(err)
}
ctx := context.Background()
// Chat completion
resp, err := client.Chat.Create(ctx, &llm.ChatRequest{
Model: "anthropic/claude-sonnet-4",
Messages: []llm.Message{{Role: "user", Content: "Hello"}},
})
if err != nil {
panic(err)
}
fmt.Println(resp.Choices[0].Message.Content)
}

Chat supports vision (images) via `ContentPart`:
resp, err := client.Chat.Create(ctx, &llm.ChatRequest{
Model: "anthropic/claude-sonnet-4",
Messages: []llm.Message{{
Role: "user",
Content: []llm.ContentPart{
{Type: "text", Text: "What's in this image?"},
{Type: "image_url", ImageURL: &llm.ImageURL{URL: "https://example.com/image.jpg"}},
},
}},
})

import (
"errors"
"io"
)
stream, err := client.Chat.CreateStream(ctx, &llm.ChatRequest{
Model: "anthropic/claude-sonnet-4",
Messages: []llm.Message{{Role: "user", Content: "Hello"}},
})
if err != nil {
panic(err)
}
defer stream.Close()
for {
chunk, err := stream.Next()
if err != nil {
if errors.Is(err, io.EOF) {
break
}
panic(err)
}
if chunk != nil && len(chunk.Choices) > 0 && chunk.Choices[0].Delta != nil && chunk.Choices[0].Delta.Content != nil {
fmt.Print(chunk.Choices[0].Delta.Content)
}
}

emb, err := client.Embeddings.Create(ctx, &llm.EmbeddingRequest{
Model: "openai/text-embedding-3-small",
Input: "The quick brown fox",
})
if err != nil {
panic(err)
}
fmt.Println(emb.Data[0].Embedding)

- OpenRouter (`llm.ProviderOpenRouter`) - Access to multiple models via the OpenRouter API. When using OpenRouter, the API key can be set via the `OPENROUTER_API_KEY` env var if `WithAPIKey` is omitted.
- `ErrUnknownProvider` is returned when the provider is not supported. Use `errors.Is(err, &llm.ErrUnknownProvider{Provider: "openrouter"})` or `errors.As` to check.
- `ErrInvalidRequest` and `ValidationError` are returned when a request fails validation (e.g. empty model, empty messages). Use `errors.Is(err, llm.ErrInvalidRequest)` to detect validation errors.
- For streaming, `StreamReader.Next()` returns `io.EOF` when done. Use `errors.Is(err, io.EOF)` for EOF detection.
MIT