From a1fa11e09ed5a865b11f2b8a25589dcfe362bcd6 Mon Sep 17 00:00:00 2001
From: Danny Kopping
Date: Tue, 30 Sep 2025 12:48:10 +0200
Subject: [PATCH] chore: correcting openai stream error handling

Signed-off-by: Danny Kopping
---
 bridge_integration_test.go         | 21 ++++++++++++++++++---
 intercept_openai_chat_streaming.go |  2 +-
 2 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/bridge_integration_test.go b/bridge_integration_test.go
index de64532..b11431c 100644
--- a/bridge_integration_test.go
+++ b/bridge_integration_test.go
@@ -871,10 +871,25 @@ func TestErrorHandling(t *testing.T) {
 				return aibridge.NewRequestBridge(t.Context(), []aibridge.Provider{aibridge.NewOpenAIProvider(cfg(addr, apiKey))}, logger, client, srvProxyMgr)
 			},
 			responseHandlerFn: func(streaming bool, resp *http.Response) {
-				body, err := io.ReadAll(resp.Body)
-				require.NoError(t, err)
+				if streaming {
+					// Server responds first with 200 OK then starts streaming.
+					require.Equal(t, http.StatusOK, resp.StatusCode)
 
-				t.Log(body)
+					sp := aibridge.NewSSEParser()
+					require.NoError(t, sp.Parse(resp.Body))
+					// OpenAI sends all events under the same type.
+					messageEvents := sp.MessageEvents()
+					require.NotEmpty(t, messageEvents)
+
+					errEvent := sp.MessageEvents()[len(sp.MessageEvents())-2] // Last event is termination marker ("[DONE]").
+					require.NotEmpty(t, errEvent)
+					require.Contains(t, errEvent.Data, "The server had an error while processing your request. Sorry about that!")
+				} else {
+					require.Equal(t, resp.StatusCode, http.StatusInternalServerError)
+					body, err := io.ReadAll(resp.Body)
+					require.NoError(t, err)
+					require.Contains(t, string(body), "The server had an error while processing your request. Sorry about that")
+				}
 			},
 		},
 	}
diff --git a/intercept_openai_chat_streaming.go b/intercept_openai_chat_streaming.go
index 2396a5e..314d20c 100644
--- a/intercept_openai_chat_streaming.go
+++ b/intercept_openai_chat_streaming.go
@@ -185,7 +185,7 @@ func (i *OpenAIStreamingChatInterception) ProcessRequest(w http.ResponseWriter,
 		// We can't reflect an error back if there's a connection error or the request context was canceled.
 	} else if oaiErr := getOpenAIErrorResponse(streamErr); oaiErr != nil {
 		logger.Warn(ctx, "openai stream error", slog.Error(streamErr))
-		interceptionErr = fmt.Errorf("stream error: %w", oaiErr)
+		interceptionErr = oaiErr
 	} else {
 		logger.Warn(ctx, "unknown error", slog.Error(streamErr))
 		// Unfortunately, the OpenAI SDK does not support parsing errors received in the stream
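
The rationale for the one-line change in intercept_openai_chat_streaming.go: assigning the typed error returned by getOpenAIErrorResponse directly keeps the provider's error intact for whatever reflects it back to the client, whereas wrapping it with fmt.Errorf prefixes the message seen by the caller. The minimal sketch below illustrates the difference; the openAIError type and its fields are hypothetical stand-ins for whatever getOpenAIErrorResponse actually returns, and are not part of this patch.

package main

import (
	"errors"
	"fmt"
)

// openAIError is an assumed stand-in for the typed error produced by
// getOpenAIErrorResponse in the patch; field names are illustrative only.
type openAIError struct {
	Code    int
	Message string
}

func (e *openAIError) Error() string { return e.Message }

func main() {
	src := &openAIError{Code: 500, Message: "The server had an error while processing your request. Sorry about that!"}

	wrapped := fmt.Errorf("stream error: %w", src) // previous behaviour
	direct := error(src)                           // patched behaviour: interceptionErr = oaiErr

	// errors.As finds the typed error either way...
	var target *openAIError
	fmt.Println(errors.As(wrapped, &target), errors.As(direct, &target)) // true true

	// ...but the message surfaced to the client differs: the wrapper prepends
	// "stream error:", while the direct assignment keeps the provider's text verbatim.
	fmt.Println(wrapped.Error())
	fmt.Println(direct.Error())
}

Both forms still satisfy errors.As/errors.Is, so the change only affects what is serialized back to the caller, which is what the updated streaming and non-streaming assertions in bridge_integration_test.go check for.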