Merge pull request #621 from tmc/fix-omitted-temp
llms: Remove omitempty from temperature params
tmc committed Feb 20, 2024
2 parents 7193152 + 696fdaf commit 627273a
Showing 13 changed files with 114 additions and 11 deletions.
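
The rationale, in short: with omitempty, encoding/json omits a float64 field whose value is Go's zero value (0), so an explicitly requested temperature of 0 was silently dropped from the request payload and the provider fell back to its default. Removing omitempty makes the field always serialize. A minimal, self-contained sketch of the difference (the struct names here are hypothetical, for illustration only, not from the repo):

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical request structs contrasting the old and new struct tags
// for the temperature field.
type withOmitempty struct {
	Temperature float64 `json:"temperature,omitempty"`
}

type withoutOmitempty struct {
	Temperature float64 `json:"temperature"`
}

func main() {
	before, _ := json.Marshal(withOmitempty{Temperature: 0})
	after, _ := json.Marshal(withoutOmitempty{Temperature: 0})
	fmt.Println(string(before)) // {}                 (temperature 0 was dropped)
	fmt.Println(string(after))  // {"temperature":0}  (temperature 0 is sent)
}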
13 changes: 13 additions & 0 deletions examples/openai-completion-example-with-http-debugging/go.mod
@@ -0,0 +1,13 @@
module github.com/tmc/langchaingo/examples/openai-completion-example-with-http-debugging

go 1.21

toolchain go1.21.4

require github.com/tmc/langchaingo v0.1.4-alpha.0

require (
	github.com/dlclark/regexp2 v1.8.1 // indirect
	github.com/google/uuid v1.4.0 // indirect
	github.com/pkoukk/tiktoken-go v0.1.2 // indirect
)
16 changes: 16 additions & 0 deletions examples/openai-completion-example-with-http-debugging/go.sum
@@ -0,0 +1,16 @@
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.8.1 h1:6Lcdwya6GjPUNsBct8Lg/yRPwMhABj269AAzdGSiR+0=
github.com/dlclark/regexp2 v1.8.1/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/pkoukk/tiktoken-go v0.1.2 h1:u7PCSBiWJ3nJYoTGShyM9iHXz4dNyYkurwwp+GHtyHY=
github.com/pkoukk/tiktoken-go v0.1.2/go.mod h1:boMWvk9pQCOTx11pgu0DrIdrAKgQzzJKUP6vLXaz7Rw=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/tmc/langchaingo v0.1.4-alpha.0 h1:WVu2KgHr9wloHAiMbbeytiv2W76vpkA59uUwf0EUgrQ=
github.com/tmc/langchaingo v0.1.4-alpha.0/go.mod h1:PKtJMXizDxJnT86q7lsVsyzTJqd0P2QKF7wt2jF6Lxk=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
74 changes: 74 additions & 0 deletions examples/openai-completion-example-with-http-debugging/main.go
@@ -0,0 +1,74 @@
package main

import (
	"context"
	"flag"
	"fmt"
	"log"
	"net/http"
	"net/http/httputil"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/openai"
)

var flagDebugHTTP = flag.Bool("debug-http", true, "enable debugging of HTTP requests and responses")

func main() {
	// Demonstrates how to use a custom HTTP client to log requests and responses.
	flag.Parse()
	var opts []openai.Option
	if *flagDebugHTTP {
		opts = append(opts, openai.WithHTTPClient(&http.Client{
			Transport: &logTransport{
				Transport: http.DefaultTransport, // Use http.DefaultTransport as the underlying transport
			},
		}))
	}

	llm, err := openai.New(opts...)
	if err != nil {
		log.Fatal(err)
	}
	ctx := context.Background()
	completion, err := llms.GenerateFromSinglePrompt(ctx,
		llm,
		"The first man to walk on the moon",
		llms.WithTemperature(0.0),
	)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(completion)
}

// logTransport wraps around an existing http.RoundTripper, allowing us to
// intercept and log the request and response.
type logTransport struct {
	Transport http.RoundTripper
}

// RoundTrip executes a single HTTP transaction and logs the request and response.
func (c *logTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// Log the request
	requestDump, err := httputil.DumpRequestOut(req, true)
	if err == nil {
		log.Println("Request:\n" + string(requestDump))
	} else {
		log.Println("Error dumping request:", err)
	}
	// Use the underlying Transport to execute the request
	resp, err := c.Transport.RoundTrip(req)
	if err != nil {
		return nil, err // Return early if there's an error
	}
	// Log the response
	responseDump, err := httputil.DumpResponse(resp, true)
	if err == nil {
		log.Println("Response:\n" + string(responseDump))
	} else {
		log.Println("Error dumping response:", err)
	}
	return resp, err
}
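
To try the example locally (assuming an OpenAI API key is available in the environment, e.g. OPENAI_API_KEY, which the openai package reads by default):

	go run . -debug-http

Since the flag defaults to true, plain "go run ." behaves the same; the dumped request body is a convenient way to confirm that "temperature":0 now appears in the outgoing JSON.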
2 changes: 1 addition & 1 deletion llms/anthropic/internal/anthropicclient/anthropicclient.go
@@ -61,7 +61,7 @@ func New(token string, model string, opts ...Option) (*Client, error) {
 type CompletionRequest struct {
 	Model       string   `json:"model"`
 	Prompt      string   `json:"prompt"`
-	Temperature float64  `json:"temperature,omitempty"`
+	Temperature float64  `json:"temperature"`
 	MaxTokens   int      `json:"max_tokens_to_sample,omitempty"`
 	StopWords   []string `json:"stop_sequences,omitempty"`
 	TopP        float64  `json:"top_p,omitempty"`
2 changes: 1 addition & 1 deletion llms/anthropic/internal/anthropicclient/completions.go
@@ -19,7 +19,7 @@ const (
 type completionPayload struct {
 	Model       string   `json:"model"`
 	Prompt      string   `json:"prompt"`
-	Temperature float64  `json:"temperature,omitempty"`
+	Temperature float64  `json:"temperature"`
 	MaxTokens   int      `json:"max_tokens_to_sample,omitempty"`
 	TopP        float64  `json:"top_p,omitempty"`
 	StopWords   []string `json:"stop_sequences,omitempty"`
2 changes: 1 addition & 1 deletion llms/ernie/internal/ernieclient/chat.go
@@ -23,7 +23,7 @@ const (
 type ChatRequest struct {
 	Model       string         `json:"model,omitempty"`
 	Messages    []*ChatMessage `json:"messages"`
-	Temperature float64        `json:"temperature,omitempty"`
+	Temperature float64        `json:"temperature"`
 	TopP        float64        `json:"top_p,omitempty"`
 	MaxTokens   int            `json:"max_tokens,omitempty"`
 	N           int            `json:"n,omitempty"`
2 changes: 1 addition & 1 deletion llms/ernie/internal/ernieclient/ernieclient.go
@@ -57,7 +57,7 @@ type Message struct {
 // CompletionRequest is a request to create a completion.
 type CompletionRequest struct {
 	Messages     []Message `json:"messages"`
-	Temperature  float64   `json:"temperature,omitempty"`
+	Temperature  float64   `json:"temperature"`
 	TopP         float64   `json:"top_p,omitempty"`
 	PenaltyScore float64   `json:"penalty_score,omitempty"`
 	Stream       bool      `json:"stream,omitempty"`
4 changes: 2 additions & 2 deletions llms/googleai/internal/palmclient/palmclient.go
@@ -77,7 +77,7 @@ var ErrEmptyResponse = errors.New("empty response")
 type CompletionRequest struct {
 	Prompts       []string `json:"prompts"`
 	MaxTokens     int      `json:"max_tokens"`
-	Temperature   float64  `json:"temperature,omitempty"`
+	Temperature   float64  `json:"temperature"`
 	TopP          int      `json:"top_p,omitempty"`
 	TopK          int      `json:"top_k,omitempty"`
 	StopSequences []string `json:"stop_sequences"`
@@ -160,7 +160,7 @@ func (c *PaLMClient) CreateEmbedding(ctx context.Context, r *EmbeddingRequest) (
 type ChatRequest struct {
 	Context        string         `json:"context"`
 	Messages       []*ChatMessage `json:"messages"`
-	Temperature    float64        `json:"temperature,omitempty"`
+	Temperature    float64        `json:"temperature"`
 	TopP           int            `json:"top_p,omitempty"`
 	TopK           int            `json:"top_k,omitempty"`
 	CandidateCount int            `json:"candidate_count,omitempty"`
2 changes: 1 addition & 1 deletion llms/huggingface/internal/huggingfaceclient/huggingfaceclient.go
@@ -32,7 +32,7 @@ type InferenceRequest struct {
 	Model       string        `json:"repositoryId"`
 	Prompt      string        `json:"prompt"`
 	Task        InferenceTask `json:"task"`
-	Temperature float64       `json:"temperature,omitempty"`
+	Temperature float64       `json:"temperature"`
 	TopP        float64       `json:"top_p,omitempty"`
 	TopK        int           `json:"top_k,omitempty"`
 	MinLength   int           `json:"min_length,omitempty"`
2 changes: 1 addition & 1 deletion llms/huggingface/internal/huggingfaceclient/inference.go
@@ -27,7 +27,7 @@ type inferencePayload struct {
 }
 
 type parameters struct {
-	Temperature float64 `json:"temperature,omitempty"`
+	Temperature float64 `json:"temperature"`
 	TopP        float64 `json:"top_p,omitempty"`
 	TopK        int     `json:"top_k,omitempty"`
 	MinLength   int     `json:"min_length,omitempty"`
2 changes: 1 addition & 1 deletion llms/ollama/internal/ollamaclient/types.go
@@ -154,7 +154,7 @@ type Options struct {
 	NumKeep         int     `json:"num_keep,omitempty"`
 	Mirostat        int     `json:"mirostat,omitempty"`
 	NumPredict      int     `json:"num_predict,omitempty"`
-	Temperature     float32 `json:"temperature,omitempty"`
+	Temperature     float32 `json:"temperature"`
 	TypicalP        float32 `json:"typical_p,omitempty"`
 	RepeatPenalty   float32 `json:"repeat_penalty,omitempty"`
 	PresencePenalty float32 `json:"presence_penalty,omitempty"`
2 changes: 1 addition & 1 deletion llms/openai/internal/openaiclient/chat.go
@@ -24,7 +24,7 @@ var ErrContentExclusive = errors.New("only one of Content / MultiContent allowed
 type ChatRequest struct {
 	Model       string         `json:"model"`
 	Messages    []*ChatMessage `json:"messages"`
-	Temperature float64        `json:"temperature,omitempty"`
+	Temperature float64        `json:"temperature"`
 	TopP        float64        `json:"top_p,omitempty"`
 	MaxTokens   int            `json:"max_tokens,omitempty"`
 	N           int            `json:"n,omitempty"`
2 changes: 1 addition & 1 deletion llms/openai/internal/openaiclient/completions.go
@@ -8,7 +8,7 @@ import (
 type CompletionRequest struct {
 	Model            string  `json:"model"`
 	Prompt           string  `json:"prompt"`
-	Temperature      float64 `json:"temperature,omitempty"`
+	Temperature      float64 `json:"temperature"`
 	MaxTokens        int     `json:"max_tokens,omitempty"`
 	N                int     `json:"n,omitempty"`
 	FrequencyPenalty float64 `json:"frequency_penalty,omitempty"`
