fix: fix gemini panic (close #833)
songquanpeng committed Dec 17, 2023
1 parent 461f5da commit 97030e2
Showing 1 changed file with 17 additions and 6 deletions.
23 changes: 17 additions & 6 deletions controller/relay-gemini.go
```diff
@@ -114,7 +114,7 @@ func requestOpenAI2Gemini(textRequest GeneralOpenAIRequest) *GeminiChatRequest {
             Role: "model",
             Parts: []GeminiPart{
                 {
-                    Text: "ok",
+                    Text: "Okay",
                 },
             },
         })
@@ -130,6 +130,16 @@ type GeminiChatResponse struct {
     PromptFeedback GeminiChatPromptFeedback `json:"promptFeedback"`
 }
 
+func (g *GeminiChatResponse) GetResponseText() string {
+    if g == nil {
+        return ""
+    }
+    if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
+        return g.Candidates[0].Content.Parts[0].Text
+    }
+    return ""
+}
+
 type GeminiChatCandidate struct {
     Content      GeminiChatContent `json:"content"`
     FinishReason string            `json:"finishReason"`
@@ -158,20 +168,21 @@ func responseGeminiChat2OpenAI(response *GeminiChatResponse) *OpenAITextResponse
             Index: i,
             Message: Message{
                 Role:    "assistant",
-                Content: candidate.Content.Parts[0].Text,
+                Content: "",
             },
             FinishReason: stopFinishReason,
         }
+        if len(candidate.Content.Parts) > 0 {
+            choice.Message.Content = candidate.Content.Parts[0].Text
+        }
         fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
     }
     return &fullTextResponse
 }
 
 func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) *ChatCompletionsStreamResponse {
     var choice ChatCompletionsStreamResponseChoice
-    if len(geminiResponse.Candidates) > 0 && len(geminiResponse.Candidates[0].Content.Parts) > 0 {
-        choice.Delta.Content = geminiResponse.Candidates[0].Content.Parts[0].Text
-    }
+    choice.Delta.Content = geminiResponse.GetResponseText()
     choice.FinishReason = &stopFinishReason
     var response ChatCompletionsStreamResponse
     response.Object = "chat.completion.chunk"
@@ -276,7 +287,7 @@ func geminiChatHandler(c *gin.Context, resp *http.Response, promptTokens int, mo
         }, nil
     }
     fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
-    completionTokens := countTokenText(geminiResponse.Candidates[0].Content.Parts[0].Text, model)
+    completionTokens := countTokenText(geminiResponse.GetResponseText(), model)
     usage := Usage{
         PromptTokens:     promptTokens,
         CompletionTokens: completionTokens,
```
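
The panic in #833 came from call sites that indexed `geminiResponse.Candidates[0].Content.Parts[0]` without checking that either slice was non-empty, which can happen when the upstream reply carries no candidates (for example, only `promptFeedback`). Below is a minimal, self-contained sketch of the `GetResponseText()` guard this commit adds; the structs are trimmed to the fields used here and the JSON tags are omitted, so treat it as an illustration of the approach rather than the project's actual types.

```go
package main

import "fmt"

// Trimmed-down stand-ins for the structs in controller/relay-gemini.go
// (extra fields and JSON tags omitted for brevity).
type GeminiPart struct {
	Text string
}

type GeminiChatContent struct {
	Parts []GeminiPart
}

type GeminiChatCandidate struct {
	Content      GeminiChatContent
	FinishReason string
}

type GeminiChatResponse struct {
	Candidates []GeminiChatCandidate
}

// GetResponseText mirrors the helper added in this commit: it returns the
// first part of the first candidate, or "" when the response is nil or empty.
func (g *GeminiChatResponse) GetResponseText() string {
	if g == nil {
		return ""
	}
	if len(g.Candidates) > 0 && len(g.Candidates[0].Content.Parts) > 0 {
		return g.Candidates[0].Content.Parts[0].Text
	}
	return ""
}

func main() {
	// An upstream reply with no candidates at all.
	empty := &GeminiChatResponse{}

	// Pre-fix call sites did the equivalent of the line below, which
	// panics with "index out of range [0] with length 0":
	//   _ = empty.Candidates[0].Content.Parts[0].Text
	// The helper degrades to an empty string instead.
	fmt.Printf("%q\n", empty.GetResponseText()) // ""

	// A normal reply still yields its text.
	ok := &GeminiChatResponse{Candidates: []GeminiChatCandidate{{
		Content: GeminiChatContent{Parts: []GeminiPart{{Text: "hello"}}},
	}}}
	fmt.Printf("%q\n", ok.GetResponseText()) // "hello"
}
```

In the diff above, streamResponseGeminiChat2OpenAI and geminiChatHandler switch to this helper, while responseGeminiChat2OpenAI adds an explicit `len(candidate.Content.Parts) > 0` check per candidate; the remaining "ok" → "Okay" change only rewords the dummy model message.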
