Skip to content

Commit 1d0efcd

Browse files
committed
Fix OpenAI payload normalization for list content
1 parent 7357848 commit 1d0efcd

File tree

2 files changed

+94
-1
lines changed

2 files changed

+94
-1
lines changed

src/connectors/openai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -337,7 +337,7 @@ def _get_value(message: Any, key: str) -> Any:
337337
return getattr(message, key, None)
338338

339339
def _normalize_content(value: Any) -> Any:
340-
if isinstance(value, list | tuple):
340+
if isinstance(value, (list, tuple)):
341341
normalized_parts: list[Any] = []
342342
for part in value:
343343
if hasattr(part, "model_dump") and callable(
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
from __future__ import annotations
2+
3+
from typing import Any
4+
from unittest.mock import AsyncMock, MagicMock
5+
6+
import pytest
7+
8+
from src.connectors.openai import OpenAIConnector
9+
from src.core.config.app_config import AppConfig
10+
from src.core.domain.chat import (
11+
CanonicalChatRequest,
12+
ChatMessage,
13+
MessageContentPartText,
14+
)
15+
from src.core.domain.responses import ResponseEnvelope
16+
17+
18+
@pytest.mark.asyncio
async def test_prepare_payload_handles_sequence_content(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Verify ``chat_completions`` accepts multi-part (list) message content.

    Drives a request whose user message content is a list of text parts and
    checks that the parts survive intact in the payload handed to the
    (patched) non-streaming response handler — i.e. payload normalization
    does not raise on sequence content.
    """
    # Stub the translation layer: it just hands back a minimal payload dict.
    translator = MagicMock()
    translator.from_domain_request.return_value = {
        "model": "gpt-4",
        "messages": [],
    }

    connector = OpenAIConnector(
        client=AsyncMock(),
        config=AppConfig(),
        translation_service=translator,
    )
    connector.disable_health_check()
    connector.api_key = "test-token"

    # Capture whatever payload reaches the handler instead of doing real I/O.
    captured: list[dict[str, Any]] = []

    async def _record_payload(
        self: OpenAIConnector,
        url: str,
        payload: dict[str, Any],
        headers: dict[str, str] | None,
        session_id: str,
    ) -> ResponseEnvelope:
        captured.append(payload)
        return ResponseEnvelope(content={}, headers={}, status_code=200)

    monkeypatch.setattr(
        OpenAIConnector, "_handle_non_streaming_response", _record_payload
    )

    # Domain request whose single user message carries list-based content.
    user_message = ChatMessage(
        role="user",
        content=[
            MessageContentPartText(text="first"),
            MessageContentPartText(text="second"),
        ],
    )
    request = CanonicalChatRequest(
        model="gpt-4",
        messages=[user_message],
        stream=False,
    )

    processed_messages = [
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "first"},
                {"type": "text", "text": "second"},
            ],
        }
    ]

    await connector.chat_completions(
        request,
        processed_messages,
        "gpt-4",
        identity=None,
    )

    assert captured, "Expected payload normalization to occur"
    normalized = captured[0]["messages"][0]["content"]
    assert normalized == [
        {"type": "text", "text": "first"},
        {"type": "text", "text": "second"},
    ]

0 commit comments

Comments
 (0)