From 942a88e9e1e81394cbc04788ca7339591129117a Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 1/7] Replace OpenAI's `o1-preview` with `o3` Since `o1` models are deprecated, and the Responses API no longer supports `o1`. See: https://platform.openai.com/docs/deprecations#2025-04-28-o1-preview-and-o1-mini --- examples/openai/{chat-o1.php => chat-o3.php} | 2 +- src/platform/src/Bridge/OpenAi/ModelCatalog.php | 15 +++------------ .../tests/Bridge/OpenAi/ModelCatalogTest.php | 3 +-- 3 files changed, 5 insertions(+), 15 deletions(-) rename examples/openai/{chat-o1.php => chat-o3.php} (96%) diff --git a/examples/openai/chat-o1.php b/examples/openai/chat-o3.php similarity index 96% rename from examples/openai/chat-o1.php rename to examples/openai/chat-o3.php index 67287517a..c8f2e7297 100644 --- a/examples/openai/chat-o1.php +++ b/examples/openai/chat-o3.php @@ -32,7 +32,7 @@ at the beginning and end, not throughout the code. PROMPT; -$agent = new Agent($platform, 'o1-preview'); +$agent = new Agent($platform, 'o3'); $result = $agent->call(new MessageBag(Message::ofUser($prompt))); echo $result->getContent().\PHP_EOL; diff --git a/src/platform/src/Bridge/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/OpenAi/ModelCatalog.php index b853da045..8f15a83bd 100644 --- a/src/platform/src/Bridge/OpenAi/ModelCatalog.php +++ b/src/platform/src/Bridge/OpenAi/ModelCatalog.php @@ -91,22 +91,12 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, - Capability::INPUT_AUDIO, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED, + Capability::INPUT_AUDIO, ], ], - 'o1-mini' => [ - 'class' => Gpt::class, - 'capabilities' => [ - Capability::INPUT_MESSAGES, - Capability::OUTPUT_TEXT, - Capability::OUTPUT_STREAMING, - Capability::TOOL_CALLING, - Capability::INPUT_IMAGE, - ], - ], - 'o1-preview' => [ + 'o3' => [ 'class' => Gpt::class, 'capabilities' => [ Capability::INPUT_MESSAGES, @@ -114,6 +104,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::OUTPUT_STRUCTURED, ], ], 'o3-mini' => [ diff --git a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php index 260af4f1f..9b673ebb8 100644 --- a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php +++ b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php @@ -36,8 +36,7 @@ public static function modelsProvider(): iterable yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_AUDIO, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'o1-mini' => ['o1-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; - yield 'o1-preview' => ['o1-preview', Gpt::class, [Capability::INPUT_MESSAGES, 
Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'o3' => ['o3', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; yield 'o3-mini' => ['o3-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'o3-mini-high' => ['o3-mini-high', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; yield 'gpt-4.5-preview' => ['gpt-4.5-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; From 0c7ac1a1cb80010f13d6bd6839eb91b03ae7ec09 Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 2/7] Disable GPT audio example and capability Since we're migrating to the Responses API (from chat completions), and that API doesn't support audio yet. It should be added back when it does ("coming soon", apparently) See: https://platform.openai.com/docs/guides/migrate-to-responses --- examples/openai/audio-input.php | 23 ++++++++++--------- .../src/Bridge/OpenAi/ModelCatalog.php | 5 +++- .../tests/Bridge/OpenAi/ModelCatalogTest.php | 2 +- 3 files changed, 17 insertions(+), 13 deletions(-) diff --git a/examples/openai/audio-input.php b/examples/openai/audio-input.php index 9022cc912..ad7423582 100644 --- a/examples/openai/audio-input.php +++ b/examples/openai/audio-input.php @@ -16,14 +16,15 @@ require_once dirname(__DIR__).'/bootstrap.php'; -$platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); - -$messages = new MessageBag( - Message::ofUser( - 'What is this recording about?', - Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'), - ), -); -$result = $platform->invoke('gpt-4o-audio-preview', $messages); - -echo $result->asText().\PHP_EOL; +throw new RuntimeException('This example is temporarily unavailable due to migration to Responses API (which does not support audio yet).'); +// $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client()); +// +// $messages = new MessageBag( +// Message::ofUser( +// 'What is this recording about?', +// Audio::fromFile(dirname(__DIR__, 2).'/fixtures/audio.mp3'), +// ), +// ); +// $result = $platform->invoke('gpt-4o-audio-preview', $messages); +// +// echo $result->asText().\PHP_EOL; diff --git a/src/platform/src/Bridge/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/OpenAi/ModelCatalog.php index 8f15a83bd..8453e2be5 100644 --- a/src/platform/src/Bridge/OpenAi/ModelCatalog.php +++ b/src/platform/src/Bridge/OpenAi/ModelCatalog.php @@ -93,7 +93,10 @@ public function __construct(array $additionalModels = []) Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED, - Capability::INPUT_AUDIO, + // Audio is unsupported temporarily due to migration to Responses API; + // Capability will be reintroduced when Responses API supports audio ("coming soon") + // See: https://platform.openai.com/docs/guides/migrate-to-responses#responses-benefits + // Capability::INPUT_AUDIO, ], ], 'o3' => [ diff --git a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php index 9b673ebb8..94b15f84e 100644 --- 
a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php +++ b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php @@ -35,8 +35,8 @@ public static function modelsProvider(): iterable yield 'gpt-4-turbo' => ['gpt-4-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_AUDIO, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'o3' => ['o3', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; + yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'o3-mini' => ['o3-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; yield 'o3-mini-high' => ['o3-mini-high', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; yield 'gpt-4.5-preview' => ['gpt-4.5-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; From 3c9299a2f64ae65d80fca3852ee49de21bdc148e Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 3/7] Add PDF capabilities to OpenAI vision capable models As every OpenAI model with vision capabilities also support PDF input, as per docs: https://platform.openai.com/docs/guides/pdf-files?api-mode=responses --- src/platform/src/Bridge/OpenAi/ModelCatalog.php | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/platform/src/Bridge/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/OpenAi/ModelCatalog.php index 8453e2be5..b4ba1b671 100644 --- a/src/platform/src/Bridge/OpenAi/ModelCatalog.php +++ b/src/platform/src/Bridge/OpenAi/ModelCatalog.php @@ -60,6 +60,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, ], ], 'gpt-4o' => [ @@ -69,6 +70,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, + Capability::INPUT_PDF, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED, ], @@ -81,6 +83,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], @@ -137,6 +140,7 @@ public function __construct(array $additionalModels = 
[]) Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, + Capability::INPUT_PDF, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED, ], @@ -149,6 +153,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], @@ -160,6 +165,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], @@ -171,6 +177,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], @@ -182,6 +189,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], @@ -192,12 +200,14 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, ], ], 'gpt-5-mini' => [ 'class' => Gpt::class, 'capabilities' => [ Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, @@ -209,6 +219,7 @@ public function __construct(array $additionalModels = []) 'class' => Gpt::class, 'capabilities' => [ Capability::INPUT_MESSAGES, + Capability::INPUT_PDF, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, From d2ddf2eb0c5ad093933736d4033458a35c595b3f Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 4/7] Create content normalizers for Responses API In order to migrate from chat completions to Responses, which enforces different data structures --- .../Message/Content/DocumentNormalizer.php | 49 +++++++++++++++ .../Gpt/Message/Content/ImageNormalizer.php | 50 +++++++++++++++ .../Message/Content/ImageUrlNormalizer.php | 50 +++++++++++++++ .../Gpt/Message/Content/TextNormalizer.php | 49 +++++++++++++++ .../Content/DocumentNormalizerTest.php | 61 +++++++++++++++++++ .../Message/Content/ImageNormalizerTest.php | 60 ++++++++++++++++++ .../Content/ImageUrlNormalizerTest.php | 60 ++++++++++++++++++ .../Message/Content/TextNormalizerTest.php | 56 +++++++++++++++++ 8 files changed, 435 insertions(+) create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizer.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizerTest.php diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizer.php new 
file mode 100644 index 000000000..9bcb6d0d7 --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizer.php @@ -0,0 +1,49 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\Content\Document; +use Symfony\AI\Platform\Message\Content\File; +use Symfony\AI\Platform\Model; + +/** + * @author Guillermo Lengemann + */ +class DocumentNormalizer extends ModelContractNormalizer +{ + /** + * @param File $data + * + * @return array{type: 'input_file', filename: string, file_data: string} + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'type' => 'input_file', + 'filename' => $data->getFilename(), + 'file_data' => $data->asDataUrl(), + ]; + } + + protected function supportedDataClass(): string + { + return Document::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt && $model->supports(Capability::INPUT_PDF); + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizer.php new file mode 100644 index 000000000..21e602618 --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizer.php @@ -0,0 +1,50 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\Content\Image; +use Symfony\AI\Platform\Model; + +/** + * See: https://platform.openai.com/docs/guides/images-vision#giving-a-model-images-as-input. + */ +final class ImageNormalizer extends ModelContractNormalizer +{ + /** + * @param Image $data + * + * @return array{ + * type: 'input_image', + * image_url: string + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'type' => 'input_image', + 'image_url' => $data->asDataUrl(), + ]; + } + + protected function supportedDataClass(): string + { + return Image::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt && $model->supports(Capability::INPUT_IMAGE); + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizer.php new file mode 100644 index 000000000..80212215c --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizer.php @@ -0,0 +1,50 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\Content\ImageUrl; +use Symfony\AI\Platform\Model; + +/** + * See: https://platform.openai.com/docs/guides/images-vision#giving-a-model-images-as-input. + */ +final class ImageUrlNormalizer extends ModelContractNormalizer +{ + /** + * @param ImageUrl $data + * + * @return array{ + * type: 'input_image', + * image_url: string + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'type' => 'input_image', + 'image_url' => $data->getUrl(), + ]; + } + + protected function supportedDataClass(): string + { + return ImageUrl::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt && $model->supports(Capability::INPUT_IMAGE); + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizer.php new file mode 100644 index 000000000..f48a56fab --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizer.php @@ -0,0 +1,49 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; + +/** + * See: https://platform.openai.com/docs/guides/images-vision#giving-a-model-images-as-input. + */ +final class TextNormalizer extends ModelContractNormalizer +{ + /** + * @param Text $data + * + * @return array{ + * type: 'input_text', + * text: string + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'type' => 'input_text', + 'text' => $data->getText(), + ]; + } + + protected function supportedDataClass(): string + { + return Text::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizerTest.php new file mode 100644 index 000000000..4ba524b15 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/DocumentNormalizerTest.php @@ -0,0 +1,61 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Bridge\OpenAi\Contract\Gpt\Message\Content; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content\DocumentNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Document; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; + +class DocumentNormalizerTest extends TestCase +{ + public function testNormalize() + { + $doc = Document::fromFile(\dirname(__DIR__, 9).'/fixtures/document.pdf'); + $actual = (new DocumentNormalizer())->normalize($doc, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + + $this->assertEquals([ + 'type' => 'input_file', + 'filename' => $doc->getFilename(), + 'file_data' => $doc->asDataUrl(), + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new DocumentNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $doc = Document::fromFile(\dirname(__DIR__, 9).'/fixtures/document.pdf'); + $gpt = new Gpt('o3', [Capability::INPUT_PDF]); + + yield 'supported' => [$doc, $gpt, true]; + + yield 'unsupported model' => [$doc, new Gemini('foo', [Capability::INPUT_PDF]), false]; + + yield 'model lacks image input capability' => [$doc, new Gpt('o3'), false]; + + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizerTest.php new file mode 100644 index 000000000..07752fc9d --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageNormalizerTest.php @@ -0,0 +1,60 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Bridge\OpenAi\Contract\Gpt\Message\Content; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content\ImageNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Image; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; + +class ImageNormalizerTest extends TestCase +{ + public function testNormalize() + { + $image = Image::fromFile(\dirname(__DIR__, 9).'/fixtures/image.jpg'); + $actual = (new ImageNormalizer())->normalize($image, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + + $this->assertEquals([ + 'type' => 'input_image', + 'image_url' => $image->asDataUrl(), + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new ImageNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $image = Image::fromFile(\dirname(__DIR__, 9).'/fixtures/image.jpg'); + $gpt = new Gpt('o3', [Capability::INPUT_IMAGE]); + + yield 'supported' => [$image, $gpt, true]; + + yield 'unsupported model' => [$image, new Gemini('foo', [Capability::INPUT_IMAGE]), false]; + + yield 'model lacks image input capability' => [$image, new Gpt('o3'), false]; + + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizerTest.php new file mode 100644 index 000000000..60ae8fe94 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/ImageUrlNormalizerTest.php @@ -0,0 +1,60 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content\ImageUrlNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Capability; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\ImageUrl; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; + +class ImageUrlNormalizerTest extends TestCase +{ + public function testNormalize() + { + $url = new ImageUrl('https://example.com/image.jpg'); + $actual = (new ImageUrlNormalizer())->normalize($url, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + + $this->assertEquals([ + 'type' => 'input_image', + 'image_url' => $url->getUrl(), + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new ImageUrlNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $url = new ImageUrl('https://example.com/image.jpg'); + $gpt = new Gpt('o3', [Capability::INPUT_IMAGE]); + + yield 'supported' => [$url, $gpt, true]; + + yield 'unsupported model' => [$url, new Gemini('foo', [Capability::INPUT_IMAGE]), false]; + + yield 'model lacks image input capability' => [$url, new Gpt('o3'), false]; + + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizerTest.php new file mode 100644 index 000000000..2527638b6 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/Content/TextNormalizerTest.php @@ -0,0 +1,56 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt\Message\Content; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\Content\TextNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; + +class TextNormalizerTest extends TestCase +{ + public function testNormalize() + { + $text = new Text('Foo'); + $actual = (new TextNormalizer())->normalize($text, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + + $this->assertEquals([ + 'type' => 'input_text', + 'text' => $text->getText(), + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new TextNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $text = new Text('Foo'); + $gpt = new Gpt('o3'); + + yield 'supported' => [$text, $gpt, true]; + + yield 'unsupported model' => [$text, new Gemini('foo'), false]; + + yield 'unsupported data' => [[], $gpt, false]; + } +} From 1abdea1b07f718dff9269b4a9323db0e40f5f3ca Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 5/7] Create tool (call) normalizers for Responses API Since we're migration from chat completions to Responses, which has a differnt data structure for function calls --- .../Contract/Gpt/ToolCallNormalizer.php | 50 +++++++++++++ .../OpenAi/Contract/Gpt/ToolNormalizer.php | 61 ++++++++++++++++ .../Contract/Gpt/ToolCallNormalizerTest.php | 48 +++++++++++++ .../Contract/Gpt/ToolNormalizerTest.php | 71 +++++++++++++++++++ 4 files changed, 230 insertions(+) create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolNormalizer.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizer.php new file mode 100644 index 000000000..b87059430 --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizer.php @@ -0,0 +1,50 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Result\ToolCall; + +final class ToolCallNormalizer extends ModelContractNormalizer +{ + /** + * @param ToolCall $data + * + * @return array{ + * arguments: string, + * call_id: string, + * name: string, + * type: 'function_call' + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'arguments' => json_encode($data->getArguments()), + 'call_id' => $data->getId(), + 'name' => $data->getName(), + 'type' => 'function_call', + ]; + } + + protected function supportedDataClass(): string + { + return ToolCall::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolNormalizer.php new file mode 100644 index 000000000..191658f5c --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/ToolNormalizer.php @@ -0,0 +1,61 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\JsonSchema\Factory; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Tool\Tool; + +/** + * @phpstan-import-type JsonSchema from Factory + * + * @author Christopher Hertel + */ +class ToolNormalizer extends ModelContractNormalizer +{ + /** + * @param Tool $data + * + * @return array{ + * type: 'function', + * name: string, + * description: string, + * parameters?: JsonSchema + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + $function = [ + 'type' => 'function', + 'name' => $data->getName(), + 'description' => $data->getDescription(), + ]; + + if (null !== $data->getParameters()) { + $function['parameters'] = $data->getParameters(); + } + + return $function; + } + + protected function supportedDataClass(): string + { + return Tool::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php new file mode 100644 index 000000000..afbf4954f --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php @@ -0,0 +1,48 @@ + 24]); + + $actual = (new ToolCallNormalizer())->normalize($toolCall, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + $this->assertEquals([ + 'arguments' => json_encode($toolCall->getArguments()), + 'call_id' => $toolCall->getId(), + 'name' => $toolCall->getName(), + 'type' => 'function_call', + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new ToolCallNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $toolCall = new ToolCall('some-id', 'roll-die', ['sides' => 24]); 
+ $gpt = new Gpt('o3'); + + yield 'supported' => [$toolCall, $gpt, true]; + yield 'unsupported model' => [$toolCall, new Gemini('foo'), false]; + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php new file mode 100644 index 000000000..e30840ea9 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php @@ -0,0 +1,71 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\ToolNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Tool\ExecutionReference; +use Symfony\AI\Platform\Tool\Tool; + +class ToolNormalizerTest extends TestCase +{ + #[DataProvider('normalizeProvider')] + public function testNormalize(array $expected, Tool $tool) + { + $actual = (new ToolNormalizer())->normalize($tool, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + $this->assertEquals($expected, $actual); + } + + public static function normalizeProvider(): \Generator + { + $tool = new Tool(new ExecutionReference('Foo\Bar'), 'bar', 'description'); + + $expected = [ + 'type' => 'function', + 'name' => $tool->getName(), + 'description' => $tool->getDescription(), + ]; + + $parameters = ['foo' => 'bar']; + + yield 'no parameters' => [$expected, $tool]; + yield 'with parameters' => [ + array_merge($expected, ['parameters' => $parameters]), + new Tool(new ExecutionReference('Foo\Bar'), 'bar', 'description', $parameters), + ]; + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new ToolNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $tool = new Tool(new ExecutionReference('Foo\Bar'), 'bar', 'description'); + $gpt = new Gpt('o3'); + + yield 'supported' => [$tool, $gpt, true]; + yield 'unsupported model' => [$tool, new Gemini('foo'), false]; + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} From 22e70743e039eecd42334408f00b139d44f5de52 Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 6/7] Create Responses normalizers for message types To migrate from OpenAI's chat completions to Responses, we need to change incoming messages to the updated data structure --- .../Message/AssistantMessageNormalizer.php | 59 ++++++++++ .../Gpt/Message/MessageBagNormalizer.php | 72 ++++++++++++ .../Gpt/Message/ToolCallMessageNormalizer.php | 54 +++++++++ src/platform/src/Message/MessageBag.php | 43 +++++++ .../AssistantMessageNormalizerTest.php | 84 +++++++++++++ .../Gpt/Message/MessageBagNormalizerTest.php | 110 ++++++++++++++++++ .../Message/ToolCallMessageNormalizerTest.php | 61 ++++++++++ .../Contract/Gpt/ToolCallNormalizerTest.php | 11 +- .../Contract/Gpt/ToolNormalizerTest.php | 12 +- 9 files changed, 504 insertions(+), 2 deletions(-) create mode 100644 
src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizer.php create mode 100644 src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizer.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizerTest.php create mode 100644 src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizerTest.php diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizer.php new file mode 100644 index 000000000..58cb2b08f --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizer.php @@ -0,0 +1,59 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\AssistantMessage; +use Symfony\AI\Platform\Model; +use Symfony\Component\Serializer\Normalizer\NormalizerAwareInterface; +use Symfony\Component\Serializer\Normalizer\NormalizerAwareTrait; + +/** + * @author Guillermo Lengemann + */ +final class AssistantMessageNormalizer extends ModelContractNormalizer implements NormalizerAwareInterface +{ + use NormalizerAwareTrait; + + /** + * @param AssistantMessage $data + * + * @return array{ + * role: 'assistant', + * type: 'message', + * content: ?string + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + if ($data->hasToolCalls()) { + return $this->normalizer->normalize($data->getToolCalls(), $format, $context); + } + + return [ + 'role' => $data->getRole()->value, + 'type' => 'message', + 'content' => $data->getContent(), + ]; + } + + protected function supportedDataClass(): string + { + return AssistantMessage::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizer.php new file mode 100644 index 000000000..72f593fea --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizer.php @@ -0,0 +1,72 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\AssistantMessage; +use Symfony\AI\Platform\Message\MessageBag; +use Symfony\AI\Platform\Model; +use Symfony\Component\Serializer\Exception\ExceptionInterface; +use Symfony\Component\Serializer\Normalizer\NormalizerAwareInterface; +use Symfony\Component\Serializer\Normalizer\NormalizerAwareTrait; + +/** + * @author Pauline Vos + */ +final class MessageBagNormalizer extends ModelContractNormalizer implements NormalizerAwareInterface +{ + use NormalizerAwareTrait; + + /** + * @param MessageBag $data + * + * @return array{ + * input: array, + * model?: string, + * system?: string, + * } + * + * @throws ExceptionInterface + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + $messages['input'] = []; + + foreach ($data->withoutSystemMessage()->getMessages() as $message) { + $normalized = $this->normalizer->normalize($message, $format, $context); + + if ($message instanceof AssistantMessage && $message->hasToolCalls()) { + $messages['input'] = array_merge($messages['input'], $normalized); + continue; + } + + $messages['input'][] = $normalized; + } + + if ($data->getSystemMessage()) { + $messages['instructions'] = $data->getSystemMessage()->getContent(); + } + + return $messages; + } + + protected function supportedDataClass(): string + { + return MessageBag::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizer.php b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizer.php new file mode 100644 index 000000000..107e8bbc1 --- /dev/null +++ b/src/platform/src/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizer.php @@ -0,0 +1,54 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message; + +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract\Normalizer\ModelContractNormalizer; +use Symfony\AI\Platform\Message\ToolCallMessage; +use Symfony\AI\Platform\Model; +use Symfony\Component\Serializer\Normalizer\NormalizerAwareTrait; + +/** + * @author Christopher Hertel + */ +final class ToolCallMessageNormalizer extends ModelContractNormalizer +{ + use NormalizerAwareTrait; + + /** + * @param ToolCallMessage $data + * + * @return array{ + * type: 'function_call_output', + * call_id: string, + * output: string + * } + */ + public function normalize(mixed $data, ?string $format = null, array $context = []): array + { + return [ + 'type' => 'function_call_output', + 'call_id' => $data->getToolCall()->getId(), + 'output' => $data->getContent(), + ]; + } + + protected function supportedDataClass(): string + { + return ToolCallMessage::class; + } + + protected function supportsModel(Model $model): bool + { + return $model instanceof Gpt; + } +} diff --git a/src/platform/src/Message/MessageBag.php b/src/platform/src/Message/MessageBag.php index c480f107f..0619b9284 100644 --- a/src/platform/src/Message/MessageBag.php +++ b/src/platform/src/Message/MessageBag.php @@ -54,6 +54,16 @@ public function getSystemMessage(): ?SystemMessage return null; } + /** + * @return AssistantMessage[] + */ + public function getAssistantMessages(): array + { + return array_filter($this->messages, function (MessageInterface $message) { + return $message instanceof AssistantMessage; + }); + } + public function getUserMessage(): ?UserMessage { foreach ($this->messages as $message) { @@ -92,6 +102,39 @@ public function withoutSystemMessage(): self return $messages; } + /** + * @return ToolCallMessage[] + */ + public function getToolCallMessages(): array + { + return array_filter( + $this->messages, + static fn (MessageInterface $message) => $message instanceof ToolCallMessage, + ); + } + + public function withoutToolCallMessages(): self + { + $messages = clone $this; + $messages->messages = array_values(array_filter( + $messages->messages, + static fn (MessageInterface $message) => !$message instanceof ToolCallMessage, + )); + + return $messages; + } + + public function withoutAssistantMessages(): self + { + $messages = clone $this; + $messages->messages = array_values(array_filter( + $messages->messages, + static fn (MessageInterface $message) => !$message instanceof AssistantMessage, + )); + + return $messages; + } + public function prepend(MessageInterface $message): self { $messages = clone $this; diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizerTest.php new file mode 100644 index 000000000..b8c401a81 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/AssistantMessageNormalizerTest.php @@ -0,0 +1,84 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt\Message; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\AssistantMessageNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\ToolCallNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\AssistantMessage; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Message\Message; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Result\ToolCall; +use Symfony\Component\Serializer\Serializer; + +class AssistantMessageNormalizerTest extends TestCase +{ + #[DataProvider('normalizeProvider')] + public function testNormalize(AssistantMessage $message, array $expected) + { + $normalizer = new AssistantMessageNormalizer(); + $normalizer->setNormalizer(new Serializer([new ToolCallNormalizer()])); + + $actual = $normalizer->normalize($message, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + $this->assertEquals($expected, $actual); + } + + public static function normalizeProvider(): \Generator + { + $message = Message::ofAssistant('Foo'); + yield 'without tool calls' => [ + $message, + [ + 'role' => 'assistant', + 'type' => 'message', + 'content' => 'Foo', + ], + ]; + + $toolCall = new ToolCall('some-id', 'roll-die', ['sides' => 24]); + yield 'with tool calls' => [ + Message::ofAssistant(null, [$toolCall]), + [ + [ + 'arguments' => json_encode($toolCall->getArguments()), + 'call_id' => $toolCall->getId(), + 'name' => $toolCall->getName(), + 'type' => 'function_call', + ], + ], + ]; + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new AssistantMessageNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $assistantMessage = Message::ofAssistant('Foo'); + $gpt = new Gpt('o3'); + + yield 'supported' => [$assistantMessage, $gpt, true]; + yield 'unsupported model' => [$assistantMessage, new Gemini('foo'), false]; + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizerTest.php new file mode 100644 index 000000000..019220c8c --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/MessageBagNormalizerTest.php @@ -0,0 +1,110 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt\Message; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\AssistantMessageNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\MessageBagNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\ToolCallMessageNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\ToolCallNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\ToolNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Message\Message; +use Symfony\AI\Platform\Message\MessageBag; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Result\ToolCall; +use Symfony\Component\Serializer\Serializer; + +class MessageBagNormalizerTest extends TestCase +{ + #[DataProvider('normalizeProvider')] + public function testNormalize(MessageBag $messageBag, array $expected) + { + $normalizer = new MessageBagNormalizer(); + $normalizer->setNormalizer(new Serializer([ + new Contract\Normalizer\Message\UserMessageNormalizer(), + new AssistantMessageNormalizer(), + new ToolCallMessageNormalizer(), + new ToolNormalizer(), + new ToolCallNormalizer(), + new Contract\Normalizer\Message\SystemMessageNormalizer(), + ])); + + $actual = $normalizer->normalize($messageBag, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + + $this->assertEquals($expected, $actual); + } + + public static function normalizeProvider(): \Generator + { + $message = Message::ofUser('Foo'); + $toolCall = new ToolCall('some-id', 'roll-die', ['sides' => 24]); + $toolCallMessage = Message::ofToolCall($toolCall, 'Critical hit'); + $systemMessage = Message::forSystem('You\'re a nice bot that will not overthrow humanity.'); + $assistantMessage = Message::ofAssistant('Anything else?'); + $toolCallAssistantMessage = Message::ofAssistant(null, [$toolCall]); + + $messageBag = new MessageBag($message, $assistantMessage, $toolCallAssistantMessage, $toolCallMessage); + $expected = ['input' => [ + [ + 'role' => 'user', + 'content' => 'Foo', + ], + [ + 'role' => 'assistant', + 'type' => 'message', + 'content' => $assistantMessage->getContent(), + ], + [ + 'arguments' => json_encode($toolCall->getArguments()), + 'call_id' => $toolCall->getId(), + 'name' => $toolCall->getName(), + 'type' => 'function_call', + ], + [ + 'type' => 'function_call_output', + 'call_id' => $toolCallMessage->getToolCall()->getId(), + 'output' => $toolCallMessage->getContent(), + ], + ]]; + + yield 'normalize messages' => [$messageBag, $expected]; + + yield 'with system message' => [ + $messageBag->with($systemMessage), + array_merge($expected, ['instructions' => $systemMessage->getContent()]), + ]; + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new MessageBagNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $messageBad = new MessageBag(); + $gpt = new Gpt('o3'); + + yield 'supported' => [$messageBad, $gpt, true]; + yield 'unsupported model' => [$messageBad, new Gemini('foo'), false]; + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git 
a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizerTest.php new file mode 100644 index 000000000..4bb69a131 --- /dev/null +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/Message/ToolCallMessageNormalizerTest.php @@ -0,0 +1,61 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt\Message; + +use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; +use Symfony\AI\Platform\Bridge\Gemini\Gemini; +use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\Message\ToolCallMessageNormalizer; +use Symfony\AI\Platform\Bridge\OpenAi\Gpt; +use Symfony\AI\Platform\Contract; +use Symfony\AI\Platform\Message\Content\Text; +use Symfony\AI\Platform\Message\ToolCallMessage; +use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Result\ToolCall; + +class ToolCallMessageNormalizerTest extends TestCase +{ + public function testNormalize() + { + $toolCall = new ToolCall('some-id', 'roll-die', ['sides' => 24]); + $toolCallMessage = new ToolCallMessage($toolCall, 'Critical hit!'); + + $actual = (new ToolCallMessageNormalizer())->normalize($toolCallMessage, null, [Contract::CONTEXT_MODEL => new Gpt('o3')]); + $this->assertEquals([ + 'type' => 'function_call_output', + 'call_id' => $toolCall->getId(), + 'output' => $toolCallMessage->getContent(), + ], $actual); + } + + #[DataProvider('supportsNormalizationProvider')] + public function testSupportsNormalization(mixed $data, Model $model, bool $expected) + { + $this->assertSame( + $expected, + (new ToolCallMessageNormalizer())->supportsNormalization($data, null, [Contract::CONTEXT_MODEL => $model]) + ); + } + + public static function supportsNormalizationProvider(): \Generator + { + $toolCallMessage = new ToolCallMessage( + new ToolCall('some-id', 'roll-die', ['sides' => 24]), + 'Critical hit!' + ); + $gpt = new Gpt('o3'); + + yield 'supported' => [$toolCallMessage, $gpt, true]; + yield 'unsupported model' => [$toolCallMessage, new Gemini('foo'), false]; + yield 'unsupported data' => [new Text('foo'), $gpt, false]; + } +} diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php index afbf4954f..0b4e7c188 100644 --- a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolCallNormalizerTest.php @@ -1,11 +1,20 @@ + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. 
+ */ + namespace Symfony\AI\Platform\Tests\Bridge\OpenAi\Contract\Gpt; use PHPUnit\Framework\Attributes\DataProvider; +use PHPUnit\Framework\TestCase; use Symfony\AI\Platform\Bridge\Gemini\Gemini; use Symfony\AI\Platform\Bridge\OpenAi\Contract\Gpt\ToolCallNormalizer; -use PHPUnit\Framework\TestCase; use Symfony\AI\Platform\Bridge\OpenAi\Gpt; use Symfony\AI\Platform\Contract; use Symfony\AI\Platform\Message\Content\Text; diff --git a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php index e30840ea9..71b7d2868 100644 --- a/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php +++ b/src/platform/tests/Bridge/OpenAi/Contract/Gpt/ToolNormalizerTest.php @@ -41,7 +41,17 @@ public static function normalizeProvider(): \Generator 'description' => $tool->getDescription(), ]; - $parameters = ['foo' => 'bar']; + $parameters = [ + 'type' => 'object', + 'properties' => [ + 'text' => [ + 'type' => 'string', + 'description' => 'The text given to the tool', + ], + ], + 'required' => ['text'], + 'additionalProperties' => false, + ]; yield 'no parameters' => [$expected, $tool]; yield 'with parameters' => [ From 816623202f51f1b9c570d1f56c3779083182f9fd Mon Sep 17 00:00:00 2001 From: Pauline Vos Date: Tue, 11 Nov 2025 16:19:49 +0100 Subject: [PATCH 7/7] Migrate from OpenAI chat completions to Responses Chat completions is deprecated. Responses is the recommended API to use now. See: https://platform.openai.com/docs/guides/migrate-to-responses --- examples/openai/audio-input.php | 5 +- examples/openai/chat-with-string-options.php | 2 +- examples/openai/chat.php | 2 +- examples/openai/structured-output-clock.php | 24 ++- examples/openai/token-metadata.php | 2 +- .../Bridge/OpenAi/Contract/OpenAiContract.php | 10 +- .../src/Bridge/OpenAi/Gpt/ModelClient.php | 14 +- .../src/Bridge/OpenAi/Gpt/ResultConverter.php | 171 +++++++++--------- .../src/Bridge/OpenAi/ModelCatalog.php | 2 + .../Bridge/OpenAi/Gpt/ModelClientTest.php | 25 ++- .../Bridge/OpenAi/Gpt/ResultConverterTest.php | 90 +++------ .../tests/Bridge/OpenAi/ModelCatalogTest.php | 26 +-- 12 files changed, 187 insertions(+), 186 deletions(-) diff --git a/examples/openai/audio-input.php b/examples/openai/audio-input.php index ad7423582..25c5a256d 100644 --- a/examples/openai/audio-input.php +++ b/examples/openai/audio-input.php @@ -9,10 +9,7 @@ * file that was distributed with this source code. 
*/ -use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory; -use Symfony\AI\Platform\Message\Content\Audio; -use Symfony\AI\Platform\Message\Message; -use Symfony\AI\Platform\Message\MessageBag; +use Symfony\AI\Platform\Exception\RuntimeException; require_once dirname(__DIR__).'/bootstrap.php'; diff --git a/examples/openai/chat-with-string-options.php b/examples/openai/chat-with-string-options.php index 878f1ac97..a0ec07056 100644 --- a/examples/openai/chat-with-string-options.php +++ b/examples/openai/chat-with-string-options.php @@ -21,6 +21,6 @@ Message::forSystem('You are a pirate and you write funny.'), Message::ofUser('What is the Symfony framework?'), ); -$result = $platform->invoke('gpt-4o-mini?max_tokens=7', $messages); +$result = $platform->invoke('gpt-4o-mini?max_output_tokens=16', $messages); echo $result->asText().\PHP_EOL; diff --git a/examples/openai/chat.php b/examples/openai/chat.php index b1ab48cf7..63c3986ce 100644 --- a/examples/openai/chat.php +++ b/examples/openai/chat.php @@ -22,7 +22,7 @@ Message::ofUser('What is the Symfony framework?'), ); $result = $platform->invoke('gpt-4o-mini', $messages, [ - 'max_tokens' => 500, // specific options just for this call + 'max_output_tokens' => 500, // specific options just for this call ]); echo $result->asText().\PHP_EOL; diff --git a/examples/openai/structured-output-clock.php b/examples/openai/structured-output-clock.php index 2da321377..aab7bedca 100644 --- a/examples/openai/structured-output-clock.php +++ b/examples/openai/structured-output-clock.php @@ -33,21 +33,19 @@ $agent = new Agent($platform, 'gpt-4o-mini', [$toolProcessor], [$toolProcessor]); $messages = new MessageBag(Message::ofUser('What date and time is it?')); -$result = $agent->call($messages, ['response_format' => [ +$result = $agent->call($messages, ['text' => ['format' => [ 'type' => 'json_schema', - 'json_schema' => [ - 'name' => 'clock', - 'strict' => true, - 'schema' => [ - 'type' => 'object', - 'properties' => [ - 'date' => ['type' => 'string', 'description' => 'The current date in the format YYYY-MM-DD.'], - 'time' => ['type' => 'string', 'description' => 'The current time in the format HH:MM:SS.'], - ], - 'required' => ['date', 'time'], - 'additionalProperties' => false, + 'name' => 'clock', + 'strict' => true, + 'schema' => [ + 'type' => 'object', + 'properties' => [ + 'date' => ['type' => 'string', 'description' => 'The current date in the format YYYY-MM-DD.'], + 'time' => ['type' => 'string', 'description' => 'The current time in the format HH:MM:SS.'], ], + 'required' => ['date', 'time'], + 'additionalProperties' => false, ], -]]); +]]]); dump($result->getContent()); diff --git a/examples/openai/token-metadata.php b/examples/openai/token-metadata.php index 57e798b06..1020f013f 100644 --- a/examples/openai/token-metadata.php +++ b/examples/openai/token-metadata.php @@ -25,7 +25,7 @@ Message::ofUser('What is the Symfony framework?'), ); $result = $agent->call($messages, [ - 'max_tokens' => 500, // specific options just for this call + 'max_output_tokens' => 500, // specific options just for this call ]); print_token_usage($result->getMetadata()); diff --git a/src/platform/src/Bridge/OpenAi/Contract/OpenAiContract.php b/src/platform/src/Bridge/OpenAi/Contract/OpenAiContract.php index 5aa4f13fa..4e5b2a665 100644 --- a/src/platform/src/Bridge/OpenAi/Contract/OpenAiContract.php +++ b/src/platform/src/Bridge/OpenAi/Contract/OpenAiContract.php @@ -23,8 +23,16 @@ final class OpenAiContract extends Contract public static function create(NormalizerInterface 
...$normalizer): Contract { return parent::create( + new Gpt\Message\MessageBagNormalizer(), + new Gpt\Message\AssistantMessageNormalizer(), + new Gpt\Message\Content\ImageNormalizer(), + new Gpt\Message\Content\ImageUrlNormalizer(), + new Gpt\Message\Content\TextNormalizer(), + new Gpt\ToolNormalizer(), + new Gpt\ToolCallNormalizer(), + new Gpt\Message\ToolCallMessageNormalizer(), + new Gpt\Message\Content\DocumentNormalizer(), new AudioNormalizer(), - new DocumentNormalizer(), ...$normalizer ); } diff --git a/src/platform/src/Bridge/OpenAi/Gpt/ModelClient.php b/src/platform/src/Bridge/OpenAi/Gpt/ModelClient.php index 5317e82c2..37b77a3b0 100644 --- a/src/platform/src/Bridge/OpenAi/Gpt/ModelClient.php +++ b/src/platform/src/Bridge/OpenAi/Gpt/ModelClient.php @@ -16,6 +16,7 @@ use Symfony\AI\Platform\Model; use Symfony\AI\Platform\ModelClientInterface; use Symfony\AI\Platform\Result\RawHttpResult; +use Symfony\AI\Platform\StructuredOutput\PlatformSubscriber; use Symfony\Component\HttpClient\EventSourceHttpClient; use Symfony\Contracts\HttpClient\HttpClientInterface; @@ -42,9 +43,18 @@ public function supports(Model $model): bool public function request(Model $model, array|string $payload, array $options = []): RawHttpResult { - return new RawHttpResult($this->httpClient->request('POST', self::getBaseUrl($this->region).'/v1/chat/completions', [ + if (isset($options[PlatformSubscriber::RESPONSE_FORMAT]['json_schema']['schema'])) { + $schema = $options[PlatformSubscriber::RESPONSE_FORMAT]['json_schema']; + $options['text']['format'] = $schema; + $options['text']['format']['name'] = $schema['name']; + $options['text']['format']['type'] = $options[PlatformSubscriber::RESPONSE_FORMAT]['type']; + + unset($options[PlatformSubscriber::RESPONSE_FORMAT]); + } + + return new RawHttpResult($this->httpClient->request('POST', self::getBaseUrl($this->region).'/v1/responses', [ 'auth_bearer' => $this->apiKey, - 'json' => array_merge($options, $payload), + 'json' => array_merge($options, ['model' => $model->getName()], $payload), ])); } } diff --git a/src/platform/src/Bridge/OpenAi/Gpt/ResultConverter.php b/src/platform/src/Bridge/OpenAi/Gpt/ResultConverter.php index 675468aad..eb1de7c7b 100644 --- a/src/platform/src/Bridge/OpenAi/Gpt/ResultConverter.php +++ b/src/platform/src/Bridge/OpenAi/Gpt/ResultConverter.php @@ -31,9 +31,17 @@ /** * @author Christopher Hertel * @author Denis Zunke + * + * @phpstan-type OutputMessage array{content: array, id: string, role: string, type: 'message'} + * @phpstan-type OutputText array{type: 'output_text', text: string} + * @phpstan-type Refusal array{type: 'refusal', refusal: string} + * @phpstan-type FunctionCall array{id: string, arguments: string, call_id: string, name: string, type: 'function_call'} + * @phpstan-type Reasoning array{summary: array{text?: string}, id: string} */ final class ResultConverter implements ResultConverterInterface { + private const KEY_OUTPUT = 'output'; + public function supports(Model $model): bool { return $model instanceof Gpt; @@ -76,128 +84,114 @@ public function convert(RawResultInterface|RawHttpResult $result, array $options throw new RuntimeException(\sprintf('Error "%s"-%s (%s): "%s".', $data['error']['code'], $data['error']['type'], $data['error']['param'], $data['error']['message'])); } - if (!isset($data['choices'])) { - throw new RuntimeException('Response does not contain choices.'); + if (!isset($data[self::KEY_OUTPUT])) { + throw new RuntimeException('Response does not contain output.'); } - $choices = 
array_map($this->convertChoice(...), $data['choices']);
+        $results = $this->convertOutputArray($data[self::KEY_OUTPUT]);
 
-        return 1 === \count($choices) ? $choices[0] : new ChoiceResult(...$choices);
+        return 1 === \count($results) ? array_pop($results) : new ChoiceResult(...$results);
     }
 
-    private function convertStream(RawResultInterface|RawHttpResult $result): \Generator
+    /**
+     * @param array $output
+     *
+     * @return ResultInterface[]
+     */
+    private function convertOutputArray(array $output): array
     {
-        $toolCalls = [];
-        foreach ($result->getDataStream() as $data) {
-            if ($this->streamIsToolCall($data)) {
-                $toolCalls = $this->convertStreamToToolCalls($toolCalls, $data);
-            }
+        [$toolCallResult, $output] = $this->extractFunctionCalls($output);
 
-            if ([] !== $toolCalls && $this->isToolCallsStreamFinished($data)) {
-                yield new ToolCallResult(...array_map($this->convertToolCall(...), $toolCalls));
-            }
-
-            if (!isset($data['choices'][0]['delta']['content'])) {
-                continue;
-            }
-
-            yield $data['choices'][0]['delta']['content'];
+        $results = array_filter(array_map($this->processOutputItem(...), $output));
+        if ($toolCallResult) {
+            $results[] = $toolCallResult;
         }
+
+        return $results;
     }
 
     /**
-     * @param array $toolCalls
-     * @param array $data
-     *
-     * @return array
+     * @param OutputMessage|Reasoning $item
      */
-    private function convertStreamToToolCalls(array $toolCalls, array $data): array
+    private function processOutputItem(array $item): ?ResultInterface
     {
-        if (!isset($data['choices'][0]['delta']['tool_calls'])) {
-            return $toolCalls;
-        }
+        $type = $item['type'] ?? null;
 
-        foreach ($data['choices'][0]['delta']['tool_calls'] as $i => $toolCall) {
-            if (isset($toolCall['id'])) {
-                // initialize tool call
-                $toolCalls[$i] = [
-                    'id' => $toolCall['id'],
-                    'function' => $toolCall['function'],
-                ];
+        return match ($type) {
+            'message' => $this->convertOutputMessage($item),
+            'reasoning' => $this->convertReasoning($item),
+            default => throw new RuntimeException(\sprintf('Unsupported output type "%s".', $type)),
+        };
+    }
+
+    private function convertStream(RawResultInterface|RawHttpResult $result): \Generator
+    {
+        foreach ($result->getDataStream() as $event) {
+            if (isset($event['delta'])) {
+                yield $event['delta'];
+            }
+
+            if (!str_contains($event['type'] ?? '', 'completed')) {
                 continue;
             }
 
-            // add arguments delta to tool call
-            $toolCalls[$i]['function']['arguments'] .= $toolCall['function']['arguments'];
-        }
+            [$toolCallResult] = $this->extractFunctionCalls($event['response'][self::KEY_OUTPUT] ?? []);
 
-        return $toolCalls;
+            if ($toolCallResult && 'response.completed' === $event['type']) {
+                yield $toolCallResult;
+            }
+        }
     }
 
     /**
-     * @param array $data
+     * @param array $output
+     *
+     * @return array{ToolCallResult|null, array}
      */
-    private function streamIsToolCall(array $data): bool
+    private function extractFunctionCalls(array $output): array
     {
-        return isset($data['choices'][0]['delta']['tool_calls']);
-    }
+        $functionCalls = [];
+        foreach ($output as $key => $item) {
+            if ('function_call' === ($item['type'] ?? null)) {
+                $functionCalls[] = $item;
+                unset($output[$key]);
+            }
+        }
 
-    /**
-     * @param array $data
-     */
-    private function isToolCallsStreamFinished(array $data): bool
-    {
-        return isset($data['choices'][0]['finish_reason']) && 'tool_calls' === $data['choices'][0]['finish_reason'];
+        $toolCallResult = $functionCalls ? 
new ToolCallResult( + ...array_map($this->convertFunctionCall(...), $functionCalls) + ) : null; + + return [$toolCallResult, $output]; } /** - * @param array{ - * index: int, - * message: array{ - * role: 'assistant', - * content: ?string, - * tool_calls: array{ - * id: string, - * type: 'function', - * function: array{ - * name: string, - * arguments: string - * }, - * }, - * refusal: ?mixed - * }, - * logprobs: string, - * finish_reason: 'stop'|'length'|'tool_calls'|'content_filter', - * } $choice + * @param OutputMessage $output */ - private function convertChoice(array $choice): ToolCallResult|TextResult + private function convertOutputMessage(array $output): ?TextResult { - if ('tool_calls' === $choice['finish_reason']) { - return new ToolCallResult(...array_map([$this, 'convertToolCall'], $choice['message']['tool_calls'])); + $content = $output['content'] ?? []; + if ([] === $content) { + return null; } - if (\in_array($choice['finish_reason'], ['stop', 'length'], true)) { - return new TextResult($choice['message']['content']); + $content = array_pop($content); + if ('refusal' === $content['type']) { + return new TextResult(\sprintf('Model refused to generate output: %s', $content['refusal'])); } - throw new RuntimeException(\sprintf('Unsupported finish reason "%s".', $choice['finish_reason'])); + return new TextResult($content['text']); } /** - * @param array{ - * id: string, - * type: 'function', - * function: array{ - * name: string, - * arguments: string - * } - * } $toolCall + * @param FunctionCall $toolCall + * + * @throws \JsonException */ - private function convertToolCall(array $toolCall): ToolCall + private function convertFunctionCall(array $toolCall): ToolCall { - $arguments = json_decode($toolCall['function']['arguments'], true, flags: \JSON_THROW_ON_ERROR); + $arguments = json_decode($toolCall['arguments'], true, flags: \JSON_THROW_ON_ERROR); - return new ToolCall($toolCall['id'], $toolCall['function']['name'], $arguments); + return new ToolCall($toolCall['id'], $toolCall['name'], $arguments); } /** @@ -219,4 +213,15 @@ private static function parseResetTime(string $resetTime): ?int return null; } + + /** + * @param Reasoning $item + */ + private function convertReasoning(array $item): ?ResultInterface + { + // Reasoning is sometimes missing if it exceeds the context limit. + $summary = $item['summary']['text'] ?? null; + + return $summary ? 
new TextResult($summary) : null; + } } diff --git a/src/platform/src/Bridge/OpenAi/ModelCatalog.php b/src/platform/src/Bridge/OpenAi/ModelCatalog.php index b4ba1b671..9faa5d0cc 100644 --- a/src/platform/src/Bridge/OpenAi/ModelCatalog.php +++ b/src/platform/src/Bridge/OpenAi/ModelCatalog.php @@ -95,6 +95,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, // Audio is unsupported temporarily due to migration to Responses API; // Capability will be reintroduced when Responses API supports audio ("coming soon") @@ -121,6 +122,7 @@ public function __construct(array $additionalModels = []) Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, + Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED, ], ], diff --git a/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php b/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php index 47cf7bfb1..be7bb756a 100644 --- a/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php +++ b/src/platform/tests/Bridge/OpenAi/Gpt/ModelClientTest.php @@ -82,7 +82,7 @@ public function testItIsExecutingTheCorrectRequest() { $resultCallback = static function (string $method, string $url, array $options): HttpResponse { self::assertSame('POST', $method); - self::assertSame('https://api.openai.com/v1/chat/completions', $url); + self::assertSame('https://api.openai.com/v1/responses', $url); self::assertSame('Authorization: Bearer sk-api-key', $options['normalized_headers']['authorization'][0]); self::assertSame('{"temperature":1,"model":"gpt-4o","messages":[{"role":"user","content":"test message"}]}', $options['body']); @@ -97,20 +97,31 @@ public function testItIsExecutingTheCorrectRequestWithArrayPayload() { $resultCallback = static function (string $method, string $url, array $options): HttpResponse { self::assertSame('POST', $method); - self::assertSame('https://api.openai.com/v1/chat/completions', $url); + self::assertSame('https://api.openai.com/v1/responses', $url); self::assertSame('Authorization: Bearer sk-api-key', $options['normalized_headers']['authorization'][0]); - self::assertSame('{"temperature":0.7,"model":"gpt-4o","messages":[{"role":"user","content":"Hello"}]}', $options['body']); + self::assertSame('{"temperature":0.7,"text":{"format":{"name":"foo","schema":[],"type":"json"}},"model":"gpt-4o","messages":[{"role":"user","content":"Hello"}]}', $options['body']); return new MockResponse(); }; + + $options = [ + 'temperature' => 0.7, + 'response_format' => [ + 'type' => 'json', + 'json_schema' => [ + 'name' => 'foo', + 'schema' => []], + ], + ]; + $httpClient = new MockHttpClient([$resultCallback]); $modelClient = new ModelClient($httpClient, 'sk-api-key'); - $modelClient->request(new Gpt('gpt-4o'), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'Hello']]], ['temperature' => 0.7]); + $modelClient->request(new Gpt('gpt-4o'), ['model' => 'gpt-4o', 'messages' => [['role' => 'user', 'content' => 'Hello']]], $options); } - #[TestWith(['EU', 'https://eu.api.openai.com/v1/chat/completions'])] - #[TestWith(['US', 'https://us.api.openai.com/v1/chat/completions'])] - #[TestWith([null, 'https://api.openai.com/v1/chat/completions'])] + #[TestWith(['EU', 'https://eu.api.openai.com/v1/responses'])] + #[TestWith(['US', 'https://us.api.openai.com/v1/responses'])] + #[TestWith([null, 'https://api.openai.com/v1/responses'])] public function testItUsesCorrectBaseUrl(?string 
$region, string $expectedUrl) { $resultCallback = static function (string $method, string $url, array $options) use ($expectedUrl): HttpResponse { diff --git a/src/platform/tests/Bridge/OpenAi/Gpt/ResultConverterTest.php b/src/platform/tests/Bridge/OpenAi/Gpt/ResultConverterTest.php index 2d06095de..1138fef56 100644 --- a/src/platform/tests/Bridge/OpenAi/Gpt/ResultConverterTest.php +++ b/src/platform/tests/Bridge/OpenAi/Gpt/ResultConverterTest.php @@ -31,13 +31,14 @@ public function testConvertTextResult() $converter = new ResultConverter(); $httpResponse = self::createMock(ResponseInterface::class); $httpResponse->method('toArray')->willReturn([ - 'choices' => [ + 'output' => [ [ - 'message' => [ - 'role' => 'assistant', - 'content' => 'Hello world', - ], - 'finish_reason' => 'stop', + 'type' => 'message', + 'role' => 'assistant', + 'content' => [[ + 'type' => 'output_text', + 'text' => 'Hello world', + ]], ], ], ]); @@ -53,23 +54,12 @@ public function testConvertToolCallResult() $converter = new ResultConverter(); $httpResponse = self::createMock(ResponseInterface::class); $httpResponse->method('toArray')->willReturn([ - 'choices' => [ + 'output' => [ [ - 'message' => [ - 'role' => 'assistant', - 'content' => null, - 'tool_calls' => [ - [ - 'id' => 'call_123', - 'type' => 'function', - 'function' => [ - 'name' => 'test_function', - 'arguments' => '{"arg1": "value1"}', - ], - ], - ], - ], - 'finish_reason' => 'tool_calls', + 'type' => 'function_call', + 'id' => 'call_123', + 'name' => 'test_function', + 'arguments' => '{"arg1": "value1"}', ], ], ]); @@ -89,20 +79,22 @@ public function testConvertMultipleChoices() $converter = new ResultConverter(); $httpResponse = self::createMock(ResponseInterface::class); $httpResponse->method('toArray')->willReturn([ - 'choices' => [ + 'output' => [ [ - 'message' => [ - 'role' => 'assistant', - 'content' => 'Choice 1', - ], - 'finish_reason' => 'stop', + 'role' => 'assistant', + 'type' => 'message', + 'content' => [[ + 'type' => 'output_text', + 'text' => 'Choice 1', + ]], ], [ - 'message' => [ - 'role' => 'assistant', - 'content' => 'Choice 2', - ], - 'finish_reason' => 'stop', + 'role' => 'assistant', + 'content' => [[ + 'type' => 'output_text', + 'text' => 'Choice 2', + ]], + 'type' => 'message', ], ], ]); @@ -110,10 +102,10 @@ public function testConvertMultipleChoices() $result = $converter->convert(new RawHttpResult($httpResponse)); $this->assertInstanceOf(ChoiceResult::class, $result); - $choices = $result->getContent(); - $this->assertCount(2, $choices); - $this->assertSame('Choice 1', $choices[0]->getContent()); - $this->assertSame('Choice 2', $choices[1]->getContent()); + $output = $result->getContent(); + $this->assertCount(2, $output); + $this->assertSame('Choice 1', $output[0]->getContent()); + $this->assertSame('Choice 2', $output[1]->getContent()); } public function testContentFilterException() @@ -171,29 +163,7 @@ public function testThrowsExceptionWhenNoChoices() $httpResponse->method('toArray')->willReturn([]); $this->expectException(RuntimeException::class); - $this->expectExceptionMessage('Response does not contain choices'); - - $converter->convert(new RawHttpResult($httpResponse)); - } - - public function testThrowsExceptionForUnsupportedFinishReason() - { - $converter = new ResultConverter(); - $httpResponse = self::createMock(ResponseInterface::class); - $httpResponse->method('toArray')->willReturn([ - 'choices' => [ - [ - 'message' => [ - 'role' => 'assistant', - 'content' => 'Test content', - ], - 'finish_reason' => 
'unsupported_reason', - ], - ], - ]); - - $this->expectException(RuntimeException::class); - $this->expectExceptionMessage('Unsupported finish reason "unsupported_reason"'); + $this->expectExceptionMessage('Response does not contain output'); $converter->convert(new RawHttpResult($httpResponse)); } diff --git a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php index 94b15f84e..a4ec73868 100644 --- a/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php +++ b/src/platform/tests/Bridge/OpenAi/ModelCatalogTest.php @@ -32,21 +32,21 @@ public static function modelsProvider(): iterable yield 'gpt-3.5-turbo' => ['gpt-3.5-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; yield 'gpt-3.5-turbo-instruct' => ['gpt-3.5-turbo-instruct', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; yield 'gpt-4' => ['gpt-4', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; - yield 'gpt-4-turbo' => ['gpt-4-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; - yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4-turbo' => ['gpt-4-turbo', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF]]; + yield 'gpt-4o' => ['gpt-4o', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-mini' => ['gpt-4o-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; yield 'o3' => ['o3', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::OUTPUT_STRUCTURED, Capability::TOOL_CALLING, Capability::INPUT_IMAGE]]; - yield 'gpt-4o-audio-preview' => ['gpt-4o-audio-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'o3-mini' => ['o3-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'o3-mini' => ['o3-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, 
Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; yield 'o3-mini-high' => ['o3-mini-high', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING]]; - yield 'gpt-4.5-preview' => ['gpt-4.5-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-4.1' => ['gpt-4.1', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-4.1-mini' => ['gpt-4.1-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-4.1-nano' => ['gpt-4.1-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-5' => ['gpt-5', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-5-chat-latest' => ['gpt-5-chat-latest', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::INPUT_IMAGE]]; - yield 'gpt-5-mini' => ['gpt-5-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; - yield 'gpt-5-nano' => ['gpt-5-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.5-preview' => ['gpt-4.5-preview', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1' => ['gpt-4.1', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1-mini' => ['gpt-4.1-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-4.1-nano' => ['gpt-4.1-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5' => ['gpt-5', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5-chat-latest' => ['gpt-5-chat-latest', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::INPUT_IMAGE, Capability::INPUT_PDF]]; + yield 'gpt-5-mini' => ['gpt-5-mini', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, 
Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; + yield 'gpt-5-nano' => ['gpt-5-nano', Gpt::class, [Capability::INPUT_MESSAGES, Capability::OUTPUT_TEXT, Capability::OUTPUT_STREAMING, Capability::TOOL_CALLING, Capability::INPUT_IMAGE, Capability::INPUT_PDF, Capability::OUTPUT_STRUCTURED]]; // Embedding models yield 'text-embedding-ada-002' => ['text-embedding-ada-002', Embeddings::class, [Capability::INPUT_TEXT]];
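
For reference, a minimal caller-side sketch of the structured-output option shape the Responses API expects after this migration. It mirrors the examples/openai scripts changed above; the platform setup, prompt, schema, and model name are illustrative only, and ModelClient::request() above still rewrites the legacy response_format option into this text.format shape for callers that have not migrated yet.

    <?php

    use Symfony\AI\Platform\Bridge\OpenAi\PlatformFactory;
    use Symfony\AI\Platform\Message\Message;
    use Symfony\AI\Platform\Message\MessageBag;

    require_once dirname(__DIR__).'/bootstrap.php';

    $platform = PlatformFactory::create(env('OPENAI_API_KEY'), http_client());

    $messages = new MessageBag(Message::ofUser('Which PHP version does Symfony 7 require?'));

    // Responses API: the JSON schema lives flattened under text.format,
    // not under response_format.json_schema as in Chat Completions,
    // and token limits are set via max_output_tokens instead of max_tokens.
    $result = $platform->invoke('gpt-4o-mini', $messages, [
        'max_output_tokens' => 200,
        'text' => ['format' => [
            'type' => 'json_schema',
            'name' => 'php_requirement',
            'strict' => true,
            'schema' => [
                'type' => 'object',
                'properties' => [
                    'php_version' => ['type' => 'string', 'description' => 'The minimum PHP version, e.g. "8.2".'],
                ],
                'required' => ['php_version'],
                'additionalProperties' => false,
            ],
        ]],
    ]);

    echo $result->asText().\PHP_EOL;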