diff --git a/examples/ollama/stream.php b/examples/ollama/stream.php new file mode 100644 index 000000000..3d0d3245b --- /dev/null +++ b/examples/ollama/stream.php @@ -0,0 +1,36 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source code. + */ + +use Symfony\AI\Agent\Agent; +use Symfony\AI\Platform\Bridge\Ollama\Ollama; +use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; +use Symfony\AI\Platform\Message\Message; +use Symfony\AI\Platform\Message\MessageBag; + +require_once dirname(__DIR__).'/bootstrap.php'; + +$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client()); +$model = new Ollama(); + +$agent = new Agent($platform, $model, logger: logger()); +$messages = new MessageBag( + Message::forSystem('You are a helpful assistant.'), + Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'), +); + +// Stream the response +$result = $agent->call($messages, ['stream' => true]); + +// Emit each chunk as it is received +foreach ($result->getContent() as $chunk) { + echo $chunk->getContent(); +} +echo \PHP_EOL; diff --git a/src/platform/CHANGELOG.md b/src/platform/CHANGELOG.md index 3d4fe407b..9b9f0fe58 100644 --- a/src/platform/CHANGELOG.md +++ b/src/platform/CHANGELOG.md @@ -60,5 +60,6 @@ CHANGELOG * Add InMemoryPlatform and InMemoryRawResult for testing Platform without external Providers calls * Add tool calling support for Ollama platform * Allow beta feature flags to be passed into Anthropic model options + * Add Ollama streaming output support diff --git a/src/platform/src/Bridge/Ollama/OllamaMessageChunk.php b/src/platform/src/Bridge/Ollama/OllamaMessageChunk.php new file mode 100644 index 000000000..83f6fc1b1 --- /dev/null +++ b/src/platform/src/Bridge/Ollama/OllamaMessageChunk.php @@ -0,0 +1,45 @@ + + * + * For the full copyright and license information, please view the LICENSE + * file that was distributed with this source 
code. + */ + +namespace Symfony\AI\Platform\Bridge\Ollama; + +/** + * @author Shaun Johnston + */ +final readonly class OllamaMessageChunk +{ + /** + * @param array $message + */ + public function __construct( + public readonly string $model, + public readonly \DateTimeImmutable $created_at, + public readonly array $message, + public readonly bool $done, + ) { + } + + public function __toString(): string + { + // Return the assistant's message content if available + return $this->message['content'] ?? ''; + } + + public function getContent(): ?string + { + return $this->message['content'] ?? null; + } + + public function getRole(): ?string + { + return $this->message['role'] ?? null; + } +} diff --git a/src/platform/src/Bridge/Ollama/OllamaResultConverter.php b/src/platform/src/Bridge/Ollama/OllamaResultConverter.php index 30e40c8db..ba5b8f896 100644 --- a/src/platform/src/Bridge/Ollama/OllamaResultConverter.php +++ b/src/platform/src/Bridge/Ollama/OllamaResultConverter.php @@ -15,12 +15,17 @@ use Symfony\AI\Platform\Model; use Symfony\AI\Platform\Result\RawResultInterface; use Symfony\AI\Platform\Result\ResultInterface; +use Symfony\AI\Platform\Result\StreamResult; use Symfony\AI\Platform\Result\TextResult; use Symfony\AI\Platform\Result\ToolCall; use Symfony\AI\Platform\Result\ToolCallResult; use Symfony\AI\Platform\Result\VectorResult; use Symfony\AI\Platform\ResultConverterInterface; use Symfony\AI\Platform\Vector\Vector; +use Symfony\Component\HttpClient\Chunk\FirstChunk; +use Symfony\Component\HttpClient\Chunk\LastChunk; +use Symfony\Component\HttpClient\EventSourceHttpClient; +use Symfony\Contracts\HttpClient\ResponseInterface; /** * @author Christopher Hertel @@ -34,6 +39,10 @@ public function supports(Model $model): bool public function convert(RawResultInterface $result, array $options = []): ResultInterface { + if ($options['stream'] ?? 
false) { + return new StreamResult($this->convertStream($result->getObject())); + } + $data = $result->getData(); return \array_key_exists('embeddings', $data) @@ -83,4 +92,26 @@ public function doConvertEmbeddings(array $data): ResultInterface ), ); } + + private function convertStream(ResponseInterface $result): \Generator + { + foreach ((new EventSourceHttpClient())->stream($result) as $chunk) { + if ($chunk instanceof FirstChunk || $chunk instanceof LastChunk) { + continue; + } + + try { + $data = json_decode($chunk->getContent(), true, 512, \JSON_THROW_ON_ERROR); + } catch (\JsonException $e) { + throw new \RuntimeException('Failed to decode JSON: '.$e->getMessage()); + } + + yield new OllamaMessageChunk( + $data['model'], + new \DateTimeImmutable($data['created_at']), + $data['message'], + $data['done'], + ); + } + } } diff --git a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php index fcb236647..e786bfa33 100644 --- a/src/platform/tests/Bridge/Ollama/OllamaClientTest.php +++ b/src/platform/tests/Bridge/Ollama/OllamaClientTest.php @@ -16,9 +16,12 @@ use PHPUnit\Framework\TestCase; use Symfony\AI\Platform\Bridge\Ollama\Ollama; use Symfony\AI\Platform\Bridge\Ollama\OllamaClient; +use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory; use Symfony\AI\Platform\Model; +use Symfony\AI\Platform\Result\StreamResult; use Symfony\Component\HttpClient\MockHttpClient; use Symfony\Component\HttpClient\Response\JsonMockResponse; +use Symfony\Component\HttpClient\Response\MockResponse; #[CoversClass(OllamaClient::class)] #[UsesClass(Ollama::class)] @@ -87,4 +90,89 @@ public function testOutputStructureIsSupported() 'done' => true, ], $response->getData()); } + + public function testStreamingIsSupported() + { + $httpClient = new MockHttpClient([ + new JsonMockResponse([ + 'capabilities' => ['completion'], + ]), + new MockResponse('data: '.json_encode([ + 'model' => 'llama3.2', + 'created_at' => 
'2025-08-23T10:00:00Z', + 'message' => ['role' => 'assistant', 'content' => 'Hello world'], + 'done' => true, + ])."\n\n", [ + 'response_headers' => [ + 'content-type' => 'text/event-stream', + ], + ]), + ], 'http://127.0.0.1:1234'); + + $platform = PlatformFactory::create('http://127.0.0.1:1234', $httpClient); + $response = $platform->invoke(new Ollama(), [ + 'messages' => [ + [ + 'role' => 'user', + 'content' => 'Say hello world', + ], + ], + 'model' => 'llama3.2', + ], [ + 'stream' => true, + ]); + + $result = $response->getResult(); + + $this->assertInstanceOf(StreamResult::class, $result); + $this->assertInstanceOf(\Generator::class, $result->getContent()); + $this->assertSame(2, $httpClient->getRequestsCount()); + } + + public function testStreamingConverterWithDirectResponse() + { + $streamingData = 'data: '.json_encode([ + 'model' => 'llama3.2', + 'created_at' => '2025-08-23T10:00:00Z', + 'message' => ['role' => 'assistant', 'content' => 'Hello'], + 'done' => false, + ])."\n\n". 
+ 'data: '.json_encode([ + 'model' => 'llama3.2', + 'created_at' => '2025-08-23T10:00:01Z', + 'message' => ['role' => 'assistant', 'content' => ' world'], + 'done' => true, + ])."\n\n"; + + $mockHttpClient = new MockHttpClient([ + new MockResponse($streamingData, [ + 'response_headers' => [ + 'content-type' => 'text/event-stream', + ], + ]), + ]); + + $mockResponse = $mockHttpClient->request('GET', 'http://test.example'); + $rawResult = new \Symfony\AI\Platform\Result\RawHttpResult($mockResponse); + $converter = new \Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter(); + + $result = $converter->convert($rawResult, ['stream' => true]); + + $this->assertInstanceOf(StreamResult::class, $result); + $this->assertInstanceOf(\Generator::class, $result->getContent()); + + $regularMockHttpClient = new MockHttpClient([ + new JsonMockResponse([ + 'model' => 'llama3.2', + 'message' => ['role' => 'assistant', 'content' => 'Hello world'], + 'done' => true, + ]), + ]); + + $regularMockResponse = $regularMockHttpClient->request('GET', 'http://test.example'); + $regularRawResult = new \Symfony\AI\Platform\Result\RawHttpResult($regularMockResponse); + $regularResult = $converter->convert($regularRawResult, ['stream' => false]); + + $this->assertNotInstanceOf(StreamResult::class, $regularResult); + } }