-
Notifications
You must be signed in to change notification settings - Fork 42
/
api.go
147 lines (126 loc) · 3.38 KB
/
api.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
package openai
import (
"bytes"
"net/http"
"time"
"github.com/pkg/errors"
)
// OpenAI API endpoints.
const (
	// apiHost is the default API base URL. doRequest uses cfg.APIHost,
	// so this is presumably wired in as the Config default elsewhere —
	// confirm against the Config definition.
	apiHost                  = "https://api.openai.com"
	apiCompletionURL         = "/v1/chat/completions"
	apiDalleGenerateImageURL = "/v1/images/generations"
)
// Chat message roles accepted in ChatMessage.Role by the OpenAI
// chat completions API.
const (
	roleUser      = "user"
	roleSystem    = "system"
	roleAssistant = "assistant"
)
// httpClient is a dedicated client for OpenAI calls. We don't use our
// default clients.HttpClient as we need longer timeouts: completions
// and image generations can take well over the usual request budget.
var httpClient = http.Client{
	Timeout: 60 * time.Second,
}
// doRequest POSTs the given JSON payload to cfg.APIHost+apiEndpoint,
// attaching the JSON content type and the bearer token from cfg.APIKey.
// It returns the raw *http.Response; the caller owns closing the body
// and interpreting the status code.
func doRequest(cfg Config, apiEndpoint string, data []byte) (*http.Response, error) {
	payload := bytes.NewReader(data)
	req, err := http.NewRequest(http.MethodPost, cfg.APIHost+apiEndpoint, payload)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+cfg.APIKey)
	return httpClient.Do(req)
}
// ChatRequest is the request body for the chat completions endpoint.
// https://platform.openai.com/docs/api-reference/chat
type ChatRequest struct {
	Model       string        `json:"model"`
	Messages    []ChatMessage `json:"messages"`
	Temperature float32       `json:"temperature,omitempty"`
	TopP        float32       `json:"top_p,omitempty"`
	// N is the number of completions to generate per request.
	N    int      `json:"n,omitempty"`
	Stop []string `json:"stop,omitempty"`
	// Stream requests server-sent incremental deltas; see ChatChoice.Delta.
	Stream           bool    `json:"stream,omitempty"`
	MaxTokens        int     `json:"max_tokens,omitempty"`
	PresencePenalty  float32 `json:"presence_penalty,omitempty"`
	FrequencyPenalty float32 `json:"frequency_penalty,omitempty"`
	User             string  `json:"user,omitempty"`
	// NOTE(review): the OpenAI API documents "seed" as an integer; sending
	// a JSON string here may be rejected by the API — confirm, and consider
	// changing the type to int (interface-breaking, so not done here).
	Seed string `json:"seed,omitempty"`
}
// ChatMessage is a single conversation turn: Role is one of roleUser,
// roleSystem, or roleAssistant, and Content is the message text.
type ChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}
// ChatResponse is the response body from the chat completions endpoint.
// On success, Choices holds the generated completions; on failure, the
// API instead populates Error (see GetError).
type ChatResponse struct {
	ID      string       `json:"id"`
	Object  string       `json:"object"`
	Created int64        `json:"created"`
	Choices []ChatChoice `json:"choices"`
	// Error is filled in by the API on failed requests; Message is empty
	// on success.
	Error struct {
		Message string `json:"message"`
		Type    string `json:"type"`
	} `json:"error"`
	// Usage reports token accounting for the request/response pair.
	Usage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	} `json:"usage"`
}
// GetMessage returns the message of the first choice. When the response
// carries no choices (e.g. an API error response, where only Error is
// populated), it returns a zero-value ChatMessage instead of panicking
// with an index-out-of-range.
func (r ChatResponse) GetMessage() ChatMessage {
	if len(r.Choices) == 0 {
		return ChatMessage{}
	}
	return r.Choices[0].Message
}
// GetDelta returns the streaming delta of the first choice (only
// meaningful when the request was made with Stream set). When the
// response carries no choices, it returns a zero-value ChatMessage
// instead of panicking with an index-out-of-range.
func (r ChatResponse) GetDelta() ChatMessage {
	if len(r.Choices) == 0 {
		return ChatMessage{}
	}
	return r.Choices[0].Delta
}
// GetError converts the API's error payload into a Go error.
// It returns nil when the response carries no error message.
func (r ChatResponse) GetError() error {
	if msg := r.Error.Message; msg != "" {
		return errors.New(msg)
	}
	return nil
}
// ChatChoice is one generated completion. Message holds the full text
// for non-streaming responses; Delta holds the incremental fragment for
// streaming responses.
type ChatChoice struct {
	Index        int         `json:"index"`
	Message      ChatMessage `json:"message"`
	FinishReason string      `json:"finish_reason"`
	Delta        ChatMessage `json:"delta"`
}
// DalleRequest is the request body for the image generations endpoint.
// Example:
/*
{
  "model": "dall-e-3",
  "prompt": "a white siamese cat",
  "n": 1,
  "size": "1024x1024"
}
*/
type DalleRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
	// N is the number of images to generate.
	N int `json:"n"`
	// Size is the image dimensions, e.g. "1024x1024".
	Size string `json:"size"`
}
// DalleResponse is the response body from the image generations
// endpoint. On success, Data holds the generated images; on failure,
// the API instead populates Error. Example:
/*
{
  "created": 1700233554,
  "data": [
    {
      "url": "https://XXXX"
    }
  ]
}
or:
{
  "error": {
    "code": "invalid_size",
    "message": "The size is not supported by this model.",
    "param": null,
    "type": "invalid_request_error"
  }
}
*/
type DalleResponse struct {
	Data []DalleResponseImage `json:"data"`
	// Error is filled in by the API on failed requests; Message is empty
	// on success.
	Error struct {
		Code    string `json:"code"`
		Message string `json:"message"`
	} `json:"error"`
}
// DalleResponseImage is one generated image: URL points to the hosted
// result, and RevisedPrompt is the prompt as rewritten by the model
// before generation.
type DalleResponseImage struct {
	URL           string `json:"url"`
	RevisedPrompt string `json:"revised_prompt"`
}