71 changes: 69 additions & 2 deletions core/relay/adaptor/ali/adaptor.go
@@ -41,7 +41,12 @@ func (a *Adaptor) SupportMode(m mode.Mode) bool {
m == mode.AudioTranscription ||
m == mode.AudioTranslation ||
m == mode.Anthropic ||
m == mode.Gemini
m == mode.Gemini ||
m == mode.Responses ||
m == mode.ResponsesGet ||
m == mode.ResponsesDelete ||
m == mode.ResponsesCancel ||
m == mode.ResponsesInputItems
}

func (a *Adaptor) GetRequestURL(
@@ -132,6 +137,56 @@ func (a *Adaptor) GetRequestURL(
Method: http.MethodPost,
URL: url,
}, nil
case mode.Responses:
url, err := url.JoinPath(u, "/compatible-mode/v1/responses")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodPost,
URL: url,
}, nil
case mode.ResponsesGet:
url, err := url.JoinPath(u, "/compatible-mode/v1/responses", meta.ResponseID)
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodGet,
URL: url,
}, nil
case mode.ResponsesDelete:
url, err := url.JoinPath(u, "/compatible-mode/v1/responses", meta.ResponseID)
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodDelete,
URL: url,
}, nil
case mode.ResponsesCancel:
url, err := url.JoinPath(u, "/compatible-mode/v1/responses", meta.ResponseID, "cancel")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodPost,
URL: url,
}, nil
case mode.ResponsesInputItems:
url, err := url.JoinPath(u, "/compatible-mode/v1/responses", meta.ResponseID, "input_items")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodGet,
URL: url,
}, nil
default:
return adaptor.RequestURL{}, fmt.Errorf("unsupported mode: %s", meta.Mode)
}
@@ -173,6 +228,12 @@ func (a *Adaptor) ConvertRequest(
return anthropic.ConvertRequest(meta, req)
case mode.Gemini:
return openai.ConvertGeminiRequest(meta, req)
case mode.Responses,
mode.ResponsesGet,
mode.ResponsesDelete,
mode.ResponsesCancel,
mode.ResponsesInputItems:
return openai.ConvertRequest(meta, store, req)
default:
return adaptor.ConvertResult{}, fmt.Errorf("unsupported mode: %s", meta.Mode)
}
@@ -223,6 +284,12 @@ func (a *Adaptor) DoResponse(
return openai.GeminiStreamHandler(meta, c, resp)
}
return openai.GeminiHandler(meta, c, resp)
case mode.Responses,
mode.ResponsesGet,
mode.ResponsesDelete,
mode.ResponsesCancel,
mode.ResponsesInputItems:
return openai.DoResponse(meta, store, c, resp)
default:
return adaptor.DoResponseResult{}, relaymodel.WrapperOpenAIErrorWithMessage(
fmt.Sprintf("unsupported mode: %s", meta.Mode),
@@ -234,7 +301,7 @@ func (a *Adaptor) DoResponse(

func (a *Adaptor) Metadata() adaptor.Metadata {
return adaptor.Metadata{
Readme: "OpenAI compatibility\nNetwork search metering support\nRerank support: https://help.aliyun.com/zh/model-studio/text-rerank-api\nSTT support: https://help.aliyun.com/zh/model-studio/sambert-speech-synthesis/\nAnthropic support: /api/v2/apps/claude-code-proxy\nGemini support",
Readme: "OpenAI compatibility\nNative Responses API support\nNetwork search metering support\nRerank support: https://help.aliyun.com/zh/model-studio/text-rerank-api\nSTT support: https://help.aliyun.com/zh/model-studio/sambert-speech-synthesis/\nAnthropic support: /api/v2/apps/claude-code-proxy\nGemini support",
Models: ModelList,
}
}
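
The new Responses routes above are all built with url.JoinPath. As a standalone illustration (not part of this PR), the sketch below shows that JoinPath normalizes path separators, so a channel BaseURL with or without a trailing slash resolves to the same DashScope route; the base URL and resp_123 are placeholders mirroring the test fixtures below.

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// url.JoinPath cleans duplicate slashes, so both base forms yield the same
	// cancel route the ali adaptor composes above.
	for _, base := range []string{
		"https://dashscope.aliyuncs.com",
		"https://dashscope.aliyuncs.com/",
	} {
		u, err := url.JoinPath(base, "/compatible-mode/v1/responses", "resp_123", "cancel")
		if err != nil {
			panic(err)
		}

		fmt.Println(u)
		// https://dashscope.aliyuncs.com/compatible-mode/v1/responses/resp_123/cancel
	}
}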
149 changes: 149 additions & 0 deletions core/relay/adaptor/ali/adaptor_test.go
@@ -0,0 +1,149 @@
//nolint:testpackage
package ali

import (
"context"
"encoding/json"
"io"
"net/http"
"strings"
"testing"

coremodel "github.com/labring/aiproxy/core/model"
"github.com/labring/aiproxy/core/relay/meta"
"github.com/labring/aiproxy/core/relay/mode"
relaymodel "github.com/labring/aiproxy/core/relay/model"
)

func TestAdaptorSupportModeResponses(t *testing.T) {
adaptor := &Adaptor{}

supportedModes := []mode.Mode{
mode.Responses,
mode.ResponsesGet,
mode.ResponsesDelete,
mode.ResponsesCancel,
mode.ResponsesInputItems,
}
for _, m := range supportedModes {
if !adaptor.SupportMode(m) {
t.Fatalf("expected mode %s to be supported", m)
}
}
}

func TestAdaptorGetRequestURLResponses(t *testing.T) {
adaptor := &Adaptor{}
channel := &coremodel.Channel{BaseURL: "https://dashscope.aliyuncs.com"}

tests := []struct {
name string
mode mode.Mode
responseID string
wantMethod string
wantURL string
}{
{
name: "responses create",
mode: mode.Responses,
wantMethod: http.MethodPost,
wantURL: "https://dashscope.aliyuncs.com/compatible-mode/v1/responses",
},
{
name: "responses get",
mode: mode.ResponsesGet,
responseID: "resp_123",
wantMethod: http.MethodGet,
wantURL: "https://dashscope.aliyuncs.com/compatible-mode/v1/responses/resp_123",
},
{
name: "responses delete",
mode: mode.ResponsesDelete,
responseID: "resp_123",
wantMethod: http.MethodDelete,
wantURL: "https://dashscope.aliyuncs.com/compatible-mode/v1/responses/resp_123",
},
{
name: "responses cancel",
mode: mode.ResponsesCancel,
responseID: "resp_123",
wantMethod: http.MethodPost,
wantURL: "https://dashscope.aliyuncs.com/compatible-mode/v1/responses/resp_123/cancel",
},
{
name: "responses input items",
mode: mode.ResponsesInputItems,
responseID: "resp_123",
wantMethod: http.MethodGet,
wantURL: "https://dashscope.aliyuncs.com/compatible-mode/v1/responses/resp_123/input_items",
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
m := meta.NewMeta(
channel,
tt.mode,
"qwen-plus",
coremodel.ModelConfig{},
meta.WithResponseID(tt.responseID),
)

got, err := adaptor.GetRequestURL(m, nil, nil)
if err != nil {
t.Fatalf("GetRequestURL returned error: %v", err)
}

if got.Method != tt.wantMethod {
t.Fatalf("expected method %s, got %s", tt.wantMethod, got.Method)
}

if got.URL != tt.wantURL {
t.Fatalf("expected URL %s, got %s", tt.wantURL, got.URL)
}
})
}
}

func TestAdaptorConvertRequestResponses(t *testing.T) {
adaptor := &Adaptor{}
m := meta.NewMeta(
nil,
mode.Responses,
"qwen-plus",
coremodel.ModelConfig{},
)

req, err := http.NewRequestWithContext(
context.Background(),
http.MethodPost,
"/v1/responses",
strings.NewReader(`{"model":"qwen-plus","input":"hello","stream":true}`),
)
if err != nil {
t.Fatalf("failed to create request: %v", err)
}

result, err := adaptor.ConvertRequest(m, nil, req)
if err != nil {
t.Fatalf("ConvertRequest returned error: %v", err)
}

body, err := io.ReadAll(result.Body)
if err != nil {
t.Fatalf("failed to read converted body: %v", err)
}

var responseReq relaymodel.CreateResponseRequest
if err := json.Unmarshal(body, &responseReq); err != nil {
t.Fatalf("failed to unmarshal converted body: %v", err)
}

if responseReq.Model != "qwen-plus" {
t.Fatalf("expected model qwen-plus, got %s", responseReq.Model)
}

if !responseReq.Stream {
t.Fatal("expected stream to remain enabled")
}
}
69 changes: 66 additions & 3 deletions core/relay/adaptor/doubao/main.go
@@ -23,7 +23,7 @@ func init() {
func GetRequestURL(meta *meta.Meta) (adaptor.RequestURL, error) {
u := meta.Channel.BaseURL
switch meta.Mode {
case mode.ChatCompletions, mode.Anthropic:
case mode.ChatCompletions, mode.Anthropic, mode.Gemini:
if strings.HasPrefix(meta.ActualModel, "bot-") {
url, err := url.JoinPath(u, "/api/v3/bots/chat/completions")
if err != nil {
@@ -67,6 +67,56 @@ func GetRequestURL(meta *meta.Meta) (adaptor.RequestURL, error) {
Method: http.MethodPost,
URL: url,
}, nil
case mode.Responses:
url, err := url.JoinPath(u, "/api/v3/responses")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodPost,
URL: url,
}, nil
case mode.ResponsesGet:
url, err := url.JoinPath(u, "/api/v3/responses", meta.ResponseID)
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodGet,
URL: url,
}, nil
case mode.ResponsesDelete:
url, err := url.JoinPath(u, "/api/v3/responses", meta.ResponseID)
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodDelete,
URL: url,
}, nil
case mode.ResponsesCancel:
url, err := url.JoinPath(u, "/api/v3/responses", meta.ResponseID, "cancel")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodPost,
URL: url,
}, nil
case mode.ResponsesInputItems:
url, err := url.JoinPath(u, "/api/v3/responses", meta.ResponseID, "input_items")
if err != nil {
return adaptor.RequestURL{}, err
}

return adaptor.RequestURL{
Method: http.MethodGet,
URL: url,
}, nil
default:
return adaptor.RequestURL{}, fmt.Errorf("unsupported relay mode %d for doubao", meta.Mode)
}
Expand All @@ -85,12 +135,18 @@ func (a *Adaptor) DefaultBaseURL() string {
func (a *Adaptor) SupportMode(m mode.Mode) bool {
return m == mode.ChatCompletions ||
m == mode.Anthropic ||
m == mode.Embeddings
m == mode.Gemini ||
m == mode.Embeddings ||
m == mode.Responses ||
m == mode.ResponsesGet ||
m == mode.ResponsesDelete ||
m == mode.ResponsesCancel ||
m == mode.ResponsesInputItems
}

func (a *Adaptor) Metadata() adaptor.Metadata {
return adaptor.Metadata{
Readme: "Doubao / Volcano Engine endpoint\nSupports bot-style models and network search metering fields",
Readme: "Doubao / Volcano Engine endpoint\nSupports bot-style models, native Responses API, Gemini-compatible request conversion, and network search metering fields",
Models: ModelList,
}
}
Expand All @@ -116,6 +172,8 @@ func (a *Adaptor) ConvertRequest(
return openai.ConvertEmbeddingsRequest(meta, req, true)
case mode.ChatCompletions:
return ConvertChatCompletionsRequest(meta, req)
case mode.Gemini:
return openai.ConvertGeminiRequest(meta, req)
default:
return openai.ConvertRequest(meta, store, req)
}
@@ -152,6 +210,11 @@ func (a *Adaptor) DoResponse(
resp,
embeddingPreHandler,
)
case mode.Gemini:
if utils.IsStreamResponse(resp) {
return openai.GeminiStreamHandler(meta, c, resp)
}
return openai.GeminiHandler(meta, c, resp)
default:
return openai.DoResponse(meta, store, c, resp)
}
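
The doubao routing changes above ship without a matching test in this diff. Below is a hedged sketch of what an equivalent check could assert, following the shape of the ali test earlier in this PR; the base URL, model name, and package layout are placeholders rather than the adaptor's real defaults.

//nolint:testpackage
package doubao

import (
	"net/http"
	"testing"

	coremodel "github.com/labring/aiproxy/core/model"
	"github.com/labring/aiproxy/core/relay/meta"
	"github.com/labring/aiproxy/core/relay/mode"
)

func TestGetRequestURLResponsesCancel(t *testing.T) {
	// Placeholder base URL and model; only the route composition is asserted.
	m := meta.NewMeta(
		&coremodel.Channel{BaseURL: "https://ark.example.com"},
		mode.ResponsesCancel,
		"doubao-placeholder",
		coremodel.ModelConfig{},
		meta.WithResponseID("resp_123"),
	)

	got, err := GetRequestURL(m)
	if err != nil {
		t.Fatalf("GetRequestURL returned error: %v", err)
	}

	if got.Method != http.MethodPost {
		t.Fatalf("expected method %s, got %s", http.MethodPost, got.Method)
	}

	want := "https://ark.example.com/api/v3/responses/resp_123/cancel"
	if got.URL != want {
		t.Fatalf("expected URL %s, got %s", want, got.URL)
	}
}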