-
Notifications
You must be signed in to change notification settings - Fork 1
/
chat.go
179 lines (156 loc) · 4.53 KB
/
chat.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
package openai
import (
"context"
"encoding/json"
"fmt"
"net/http"
"strings"
)
// Role identifies the author of a chat message.
type Role string

// Message roles recognized by the chat completions API.
const (
	System    Role = "system"
	User      Role = "user"
	Assistant Role = "assistant"
)
// Msg is a single chat message.
type Msg struct {
	Role    Role   `json:"role"`
	Content string `json:"content"`
	// FunctionCall is non-nil when the model responds with a function call
	// rather than (or alongside) plain content.
	FunctionCall *FunctionCall `json:"function_call,omitempty"`
}
// FunctionCall describes a function invocation requested by the model.
// Arguments holds the raw JSON-encoded argument object exactly as returned
// by the API; decode it with UnmarshalArguments.
type FunctionCall struct {
	Name      string `json:"name"`
	Arguments string `json:"arguments"`
}
// UnmarshalCallArguments decodes the JSON arguments of the message's function
// call into out. It is safe to call when msg.FunctionCall is nil:
// UnmarshalArguments checks for a nil receiver and returns an *Error then.
func (msg *Msg) UnmarshalCallArguments(out any) error {
	return msg.FunctionCall.UnmarshalArguments(out) // tolerates nil receiver
}
// UnmarshalArguments decodes the call's JSON-encoded Arguments into out.
// A nil receiver or a call with an empty Name produces an *Error saying the
// model did not respond with a function call; malformed argument JSON
// produces an *Error wrapping the decoding failure in Cause.
func (call *FunctionCall) UnmarshalArguments(out any) error {
	if call == nil || call.Name == "" {
		return &Error{
			CallID:  "UnmarshalArguments",
			Message: "ChatGPT did not respond with a function call",
		}
	}
	if err := json.Unmarshal([]byte(call.Arguments), out); err != nil {
		return &Error{
			CallID:  "UnmarshalArguments",
			Message: fmt.Sprintf("ChatGPT returned invalid arguments JSON for %s call", call.Name),
			Cause:   err,
		}
	}
	return nil
}
// SystemMsg makes an Msg with a System role.
func SystemMsg(content string) Msg {
	return Msg{Role: System, Content: content}
}
// UserMsg makes an Msg with a User role.
func UserMsg(content string) Msg {
	return Msg{Role: User, Content: content}
}
// AssistantMsg makes an Msg with an Assistant role.
func AssistantMsg(content string) Msg {
	return Msg{Role: Assistant, Content: content}
}
// DefaultChatOptions provides a safe and conservative starting point for Chat
// call options. Note that it sets Temperature to 0 and enables unlimited
// MaxTokens.
func DefaultChatOptions() Options {
	var opt Options
	opt.Model = ModelDefaultChat
	opt.MaxTokens = 0 // zero means no token limit
	opt.Temperature = 0
	opt.TopP = 1.0
	opt.N = 0 // zero value; see Chat for the meaning of N
	return opt
}
// Chat suggests the next assistant's message for the given prompt via ChatGPT.
// When successful, it always returns at least one Msg, more if you set opt.N
// (these are multiple choices for the next message, not multiple messages).
// Options should originate from DefaultChatOptions, not DefaultCompleteOptions.
func Chat(ctx context.Context, messages []Msg, opt Options, client *http.Client, creds Credentials) ([]Msg, Usage, error) {
	const callID = "Chat"
	request := &chatRequest{
		Msgs:    messages,
		Options: opt,
	}
	var response chatResponse
	if err := post(ctx, callID, "https://api.openai.com/v1/chat/completions", client, creds, request, &response); err != nil {
		return nil, Usage{}, err
	}
	if len(response.Choices) == 0 {
		return nil, Usage{}, &Error{
			CallID:  callID,
			Message: "no results",
		}
	}
	// Flatten the choices into the message slice the caller receives.
	msgs := make([]Msg, 0, len(response.Choices))
	for _, choice := range response.Choices {
		msgs = append(msgs, choice.Msg)
	}
	return msgs, response.Usage, nil
}
// StreamChat suggests the next assistant's message for the given prompt
// via ChatGPT, streaming the response. f is invoked once per server-sent
// chunk with the message accumulated so far and that chunk's content delta;
// an error returned from f aborts the stream and is propagated.
// Options should originate from DefaultChatOptions, not DefaultCompleteOptions.
// Options.N must be 0 or 1.
func StreamChat(ctx context.Context, messages []Msg, opt Options, client *http.Client, creds Credentials, f func(msg *Msg, delta string) error) (Msg, error) {
	const callID = "StreamChat"
	request := &chatRequest{
		Msgs:    messages,
		Options: opt,
		Stream:  true,
	}
	var (
		accumulated Msg              // message built up across chunks
		content     strings.Builder  // accumulates delta content efficiently
	)
	handleChunk := func(data []byte) error {
		var chunk chatStreamingResponse
		if err := json.Unmarshal(data, &chunk); err != nil {
			return err
		}
		if len(chunk.Choices) != 1 {
			return fmt.Errorf("expected exactly one choice")
		}
		delta := chunk.Choices[0].Delta
		if delta.Role != "" {
			accumulated.Role = delta.Role
		}
		if delta.Content != "" {
			content.WriteString(delta.Content)
			accumulated.Content = content.String()
		}
		return f(&accumulated, delta.Content)
	}
	err := post(ctx, callID, "https://api.openai.com/v1/chat/completions", client, creds, request, handleChunk)
	return accumulated, err
}
// chatRequest is the JSON body POSTed to the chat completions endpoint.
// Options is embedded so its fields serialize at the top level of the object.
type chatRequest struct {
	Msgs []Msg `json:"messages"`
	Options
	Stream bool `json:"stream,omitempty"`
}
// message mirrors the wire format of a chat message without the
// function-call field.
// NOTE(review): nothing in this file references this type — it may be used
// elsewhere in the package, or be dead code; verify before removing.
type message struct {
	Role    Role   `json:"role"`
	Content string `json:"content"`
}
// chatResponse is the JSON body returned by the chat completions endpoint.
// Fields this package does not consume are kept commented out for reference.
type chatResponse struct {
	// ID string `json:"id"`
	// Object string `json:"object"`
	// Created int `json:"created"`
	// Model string `json:"model"`
	Choices []chatChoice `json:"choices"`
	Usage   Usage        `json:"usage"`
}
// chatChoice is one candidate completion within a chatResponse.
type chatChoice struct {
	Msg          Msg    `json:"message"`
	Index        int    `json:"index"`
	FinishReason string `json:"finish_reason"`
}
// chatStreamingResponse is one server-sent chunk of a streaming chat response.
type chatStreamingResponse struct {
	Choices []chatStreamingChoice `json:"choices"`
}
// chatStreamingChoice carries the incremental message delta for one streamed
// chunk.
type chatStreamingChoice struct {
	Delta        Msg    `json:"delta"`
	FinishReason string `json:"finish_reason"`
}