From 38597184ad18a97eccf0d0492ce54cc4b2bde4fe Mon Sep 17 00:00:00 2001
From: diemus <9145160+diemus@users.noreply.github.com>
Date: Thu, 6 Apr 2023 11:56:18 +0800
Subject: [PATCH] fix /v1/models bug

---
 main.go            | 111 +++++++++++++++++++++++++++++++++------------
 pkg/azure/types.go |  31 +++++++++++++
 2 files changed, 114 insertions(+), 28 deletions(-)
 create mode 100644 pkg/azure/types.go

diff --git a/main.go b/main.go
index 3abb6c0..3f1c572 100644
--- a/main.go
+++ b/main.go
@@ -27,34 +27,89 @@ func init() {
 }
 
 func main() {
-	r := gin.Default()
-	r.Any("*path", func(c *gin.Context) {
-		if ProxyMode == "azure" {
-			// BUGFIX: fix options request, see https://github.com/diemus/azure-openai-proxy/issues/1
-			if c.Request.Method == http.MethodOptions {
-				c.Header("Access-Control-Allow-Origin", "*")
-				c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
-				c.Header("Access-Control-Allow-Headers", "Content-Type, Authorization")
-				c.Status(200)
-				return
-			}
-
-			server := azure.NewOpenAIReverseProxy()
-			server.ServeHTTP(c.Writer, c.Request)
-			//BUGFIX: try to fix the difference between azure and openai
-			//Azure's response is missing a \n at the end of the stream
-			//see https://github.com/Chanzhaoyu/chatgpt-web/issues/831
-			if c.Writer.Header().Get("Content-Type") == "text/event-stream" {
-				if _, err := c.Writer.Write([]byte("\n")); err != nil {
-					log.Printf("rewrite azure response error: %v", err)
-				}
-			}
-		} else {
-			server := openai.NewOpenAIReverseProxy()
-			server.ServeHTTP(c.Writer, c.Request)
-		}
-	})
+	router := gin.Default()
+	if ProxyMode == "azure" {
+		router.GET("/v1/models", handleGetModels)
+		router.OPTIONS("/v1/*path", handleOptions)
+
+		router.POST("/v1/chat/completions", handleAzureProxy)
+		router.POST("/v1/completions", handleAzureProxy)
+		router.POST("/v1/embeddings", handleAzureProxy)
+	} else {
+		router.Any("*path", handleOpenAIProxy)
+	}
 
-	r.Run(Address)
+	router.Run(Address)
+
+}
+
+func handleGetModels(c *gin.Context) {
+	// BUGFIX: fix /v1/models request, see https://github.com/diemus/azure-openai-proxy/issues/3
+	models := []string{"gpt-4", "gpt-4-0314", "gpt-4-32k", "gpt-4-32k-0314", "gpt-3.5-turbo", "gpt-3.5-turbo-0301", "text-davinci-003", "text-embedding-ada-002"}
+	result := azure.ListModelResponse{
+		Object: "list",
+	}
+	for _, model := range models {
+		result.Data = append(result.Data, azure.Model{
+			ID:      model,
+			Object:  "model",
+			Created: 1677649963,
+			OwnedBy: "openai",
+			Permission: []azure.ModelPermission{
+				{
+					ID:                 "",
+					Object:             "model",
+					Created:            1679602087,
+					AllowCreateEngine:  true,
+					AllowSampling:      true,
+					AllowLogprobs:      true,
+					AllowSearchIndices: true,
+					AllowView:          true,
+					AllowFineTuning:    true,
+					Organization:       "*",
+					Group:              nil,
+					IsBlocking:         false,
+				},
+			},
+			Root:   model,
+			Parent: nil,
+		})
+	}
+	c.JSON(200, result)
+}
+
+func handleOptions(c *gin.Context) {
+	// BUGFIX: fix options request, see https://github.com/diemus/azure-openai-proxy/issues/1
+	c.Header("Access-Control-Allow-Origin", "*")
+	c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
+	c.Header("Access-Control-Allow-Headers", "Content-Type, Authorization")
+	c.Status(200)
+	return
+}
+
+func handleAzureProxy(c *gin.Context) {
+	// BUGFIX: fix options request, see https://github.com/diemus/azure-openai-proxy/issues/1
+	if c.Request.Method == http.MethodOptions {
+		c.Header("Access-Control-Allow-Origin", "*")
+		c.Header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
+		c.Header("Access-Control-Allow-Headers", "Content-Type, Authorization")
+		c.Status(200)
+		return
+	}
+
+	server := azure.NewOpenAIReverseProxy()
+	server.ServeHTTP(c.Writer, c.Request)
+	//BUGFIX: try to fix the difference between azure and openai
+	//Azure's response is missing a \n at the end of the stream
+	//see https://github.com/Chanzhaoyu/chatgpt-web/issues/831
+	if c.Writer.Header().Get("Content-Type") == "text/event-stream" {
+		if _, err := c.Writer.Write([]byte("\n")); err != nil {
+			log.Printf("rewrite azure response error: %v", err)
+		}
+	}
+}
+func handleOpenAIProxy(c *gin.Context) {
+	server := openai.NewOpenAIReverseProxy()
+	server.ServeHTTP(c.Writer, c.Request)
 }
 
diff --git a/pkg/azure/types.go b/pkg/azure/types.go
new file mode 100644
index 0000000..0701d4a
--- /dev/null
+++ b/pkg/azure/types.go
@@ -0,0 +1,31 @@
+package azure
+
+type ListModelResponse struct {
+	Object string  `json:"object"`
+	Data   []Model `json:"data"`
+}
+
+type Model struct {
+	ID         string            `json:"id"`
+	Object     string            `json:"object"`
+	Created    int               `json:"created"`
+	OwnedBy    string            `json:"owned_by"`
+	Permission []ModelPermission `json:"permission"`
+	Root       string            `json:"root"`
+	Parent     any               `json:"parent"`
+}
+
+type ModelPermission struct {
+	ID                 string `json:"id"`
+	Object             string `json:"object"`
+	Created            int    `json:"created"`
+	AllowCreateEngine  bool   `json:"allow_create_engine"`
+	AllowSampling      bool   `json:"allow_sampling"`
+	AllowLogprobs      bool   `json:"allow_logprobs"`
+	AllowSearchIndices bool   `json:"allow_search_indices"`
+	AllowView          bool   `json:"allow_view"`
+	AllowFineTuning    bool   `json:"allow_fine_tuning"`
+	Organization       string `json:"organization"`
+	Group              any    `json:"group"`
+	IsBlocking         bool   `json:"is_blocking"`
+}