forked from tmc/langchaingo
-
Notifications
You must be signed in to change notification settings - Fork 0
/
openai.go
73 lines (57 loc) · 1.6 KB
/
openai.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
package openai
import (
"context"
"strings"
"github.com/aresa7796/langchaingo/embeddings"
"github.com/aresa7796/langchaingo/llms/openai"
)
// OpenAI is the embedder using the OpenAI api.
type OpenAI struct {
	// client is the underlying OpenAI LLM used to create the embeddings.
	client *openai.LLM
	// StripNewLines, when true, replaces newline characters in the input
	// text with spaces before embedding (see EmbedQuery / MaybeRemoveNewLines).
	StripNewLines bool
	// BatchSize is passed to embeddings.BatchTexts to split the input texts
	// into batches before calling the API.
	BatchSize int
}

// Compile-time check that OpenAI satisfies the embeddings.Embedder interface.
var _ embeddings.Embedder = OpenAI{}
// NewOpenAI constructs an OpenAI embedder from the supplied options
// (client, newline stripping, batch size). It returns the zero value
// together with the error when option processing fails.
func NewOpenAI(opts ...Option) (OpenAI, error) {
	embedder, err := applyClientOptions(opts...)
	if err != nil {
		return OpenAI{}, err
	}
	return embedder, nil
}
// EmbedDocuments creates one vector embedding for each of the texts.
//
// NOTE(review): the inputs are split into batches and each batch's
// embeddings are collapsed into a single vector via CombineVectors, so the
// result appears to contain one vector per BATCH rather than per text
// whenever a batch holds more than one text — which contradicts the doc
// sentence above. Confirm the intended semantics of embeddings.BatchTexts
// and embeddings.CombineVectors before relying on the output length.
func (e OpenAI) EmbedDocuments(ctx context.Context, texts []string) ([][]float64, error) {
	// Optionally strip newlines, then split the texts into batches
	// (presumably of at most e.BatchSize entries — see BatchTexts).
	batchedTexts := embeddings.BatchTexts(
		embeddings.MaybeRemoveNewLines(texts, e.StripNewLines),
		e.BatchSize,
	)

	emb := make([][]float64, 0, len(texts))
	// Note: the loop variable shadows the texts parameter; inside the loop
	// "texts" is the current batch.
	for _, texts := range batchedTexts {
		// One embedding API call per batch.
		curTextEmbeddings, err := e.client.CreateEmbedding(ctx, texts)
		if err != nil {
			return nil, err
		}

		// Byte length of each text in the batch — presumably used as
		// weights by CombineVectors (TODO confirm).
		textLengths := make([]int, 0, len(texts))
		for _, text := range texts {
			textLengths = append(textLengths, len(text))
		}

		// Collapse the batch's embeddings into a single combined vector.
		combined, err := embeddings.CombineVectors(curTextEmbeddings, textLengths)
		if err != nil {
			return nil, err
		}

		emb = append(emb, combined)
	}

	return emb, nil
}
// EmbedQuery embeds a single text.
//
// When StripNewLines is set, newline characters are replaced with spaces
// before the text is sent to the API. The first (and only) vector returned
// by the client is handed back to the caller.
func (e OpenAI) EmbedQuery(ctx context.Context, text string) ([]float64, error) {
	input := text
	if e.StripNewLines {
		input = strings.ReplaceAll(input, "\n", " ")
	}

	vectors, err := e.client.CreateEmbedding(ctx, []string{input})
	if err != nil {
		return nil, err
	}

	return vectors[0], nil
}