/
openai.go
146 lines (128 loc) · 3.37 KB
/
openai.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
package mutators
import (
"context"
"errors"
"io"
"os"
"strconv"
"strings"
"github.com/batmac/ccat/pkg/log"
"github.com/batmac/ccat/pkg/secretprovider"
gpt "github.com/sashabaranov/go-openai"
)
// https://platform.openai.com/docs/guides/chat
// init registers the "chatgpt" mutator (alias "cgpt") with the mutator registry.
func init() {
	const description = "ask OpenAI ChatGPT, X:<unlimited> max replied tokens, the optional second arg is the model (Requires a valid key in $OPENAI_API_KEY, optional custom endpoint in $OPENAI_BASE_URL.)"
	singleRegister("chatgpt", chatgpt,
		withDescription(description),
		withConfigBuilder(stdConfigStrings(0, 3)),
		withAliases("cgpt"),
		withHintSlow(), // emit output as soon as it arrives (when no other mutator is chained)
		withCategory("external APIs"),
	)
}
// chatgpt reads the whole input from r, sends it as a single user message to
// the OpenAI chat-completion API, and streams the reply to w, returning the
// number of bytes written from the model's reply (the trailing newline added
// on exit is not counted).
//
// conf is the []string built by stdConfigStrings(0, 3):
//   args[0] — max reply tokens (0/unparsable means unlimited),
//   args[1] — model name (defaults to gpt-3.5-turbo),
//   args[2] — an optional pre-prompt, prefixed to the input as "<arg>:\n".
//
// Requires $OPENAI_API_KEY (via secretprovider); $OPENAI_BASE_URL optionally
// overrides the API endpoint. A key of exactly "CI" short-circuits with a
// fake "CI" response so tests never hit the network.
func chatgpt(w io.WriteCloser, r io.ReadCloser, conf any) (int64, error) {
	args := conf.([]string)
	model := gpt.GPT3Dot5Turbo
	maxTokens := 0 // 0 means unlimited
	var err error
	if len(args) > 0 && args[0] != "" {
		maxTokens, err = strconv.Atoi(args[0])
		if err != nil {
			// Non-fatal: Atoi returns 0 on error, so we fall back to unlimited.
			log.Println("first arg: ", err)
		}
	}
	if len(args) >= 2 && args[1] != "" {
		model = args[1]
	}
	prePrompt := ""
	if len(args) >= 3 && args[2] != "" {
		prePrompt = args[2] + ":\n"
	}
	key, _ := secretprovider.GetSecret("openai", "OPENAI_API_KEY")
	if key == "" {
		log.Fatal("OPENAI_API_KEY environment variable is not set")
	}
	config := gpt.DefaultConfig(key)
	if customBaseURL := os.Getenv("OPENAI_BASE_URL"); customBaseURL != "" {
		config.BaseURL = customBaseURL
	}
	log.Debugln("baseURL: ", config.BaseURL)
	log.Debugln("model: ", model)
	log.Debugln("maxTokens: ", maxTokens)
	log.Debugln("prePrompt: ", prePrompt)
	client := gpt.NewClientWithConfig(config)
	ctx := context.Background()
	// log.Debugf("models: %+v", listModels(client))
	prompt, err := io.ReadAll(r)
	if err != nil {
		return 0, err
	}
	// Fields left at their zero value (Temperature, TopP, N, Stop, penalties,
	// LogitBias, User) carry `omitempty` JSON tags in go-openai, so omitting
	// them sends the same request body as setting them to zero explicitly.
	req := gpt.ChatCompletionRequest{
		Model: model,
		Messages: []gpt.ChatCompletionMessage{
			{Role: "user", Content: prePrompt + string(prompt)},
		},
		MaxTokens: maxTokens,
		Stream:    true,
	}
	log.Debugf("request: %#v", req)
	if key == "CI" {
		log.Println("OPENAI_API_KEY is set to CI, using fake response")
		return io.Copy(w, strings.NewReader("CI"))
	}
	stream, err := client.CreateChatCompletionStream(ctx, req)
	if err != nil {
		return 0, err
	}
	defer stream.Close()
	defer func() {
		// Terminate the output with a newline; use a local error variable so
		// we never clobber the function's returned err.
		if _, werr := w.Write([]byte("\n")); werr != nil {
			log.Println(werr)
		}
	}()
	var totalWritten int64
	var steps int
	for {
		response, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			log.Debugf("Stream finished after %d steps, response=%#v", steps, response)
			return totalWritten, nil
		}
		if err != nil {
			log.Printf("Stream error after %d steps: %v\n", steps, err)
			return totalWritten, err
		}
		log.Debugf("%#v\n", response)
		// Some OpenAI-compatible endpoints emit chunks with an empty Choices
		// slice (e.g. usage-only or keep-alive chunks); indexing [0] blindly
		// would panic, so skip those.
		if len(response.Choices) == 0 {
			steps++
			continue
		}
		output := response.Choices[0].Delta.Content
		n, err := w.Write([]byte(output))
		if err != nil {
			return totalWritten, err
		}
		totalWritten += int64(n)
		steps++
	}
}
/* func listModels(c *gpt.Client) string {
models, err := c.ListModels(context.Background())
if err != nil {
log.Debugln("listModels(): ", err)
return ""
}
// convert models to json string
modelsJSON, err := json.Marshal(models)
if err != nil {
log.Debugln("listModels(): ", err)
return ""
}
return string(modelsJSON)
}
*/