Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 36 additions & 0 deletions examples/ollama/stream.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
<?php

/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/

use Symfony\AI\Agent\Agent;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\AI\Platform\Message\Message;
use Symfony\AI\Platform\Message\MessageBag;

require_once dirname(__DIR__).'/bootstrap.php';

// Wire the agent to a local Ollama instance using the example bootstrap helpers.
$platform = PlatformFactory::create(env('OLLAMA_HOST_URL'), http_client());
$agent = new Agent($platform, new Ollama(), logger: logger());

$messages = new MessageBag(
    Message::forSystem('You are a helpful assistant.'),
    Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'),
);

// Ask for a streamed result instead of waiting for the full completion
$stream = $agent->call($messages, ['stream' => true]);

// Print every chunk to the terminal the moment it arrives
foreach ($stream->getContent() as $chunk) {
    echo $chunk->getContent();
}
echo \PHP_EOL;
1 change: 1 addition & 0 deletions src/platform/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -60,5 +60,6 @@ CHANGELOG
* Add InMemoryPlatform and InMemoryRawResult for testing Platform without external Providers calls
* Add tool calling support for Ollama platform
* Allow beta feature flags to be passed into Anthropic model options
* Add Ollama streaming output support


45 changes: 45 additions & 0 deletions src/platform/src/Bridge/Ollama/OllamaMessageChunk.php
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
<?php

/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/

namespace Symfony\AI\Platform\Bridge\Ollama;

/**
* @author Shaun Johnston <shaun@snj.au>
*/
/**
 * A single chunk of a streamed Ollama chat response.
 *
 * Mirrors the shape of one streamed payload emitted by Ollama's chat API:
 * the generating model, the server-reported creation timestamp, the partial
 * message, and whether this chunk terminates the stream.
 *
 * @author Shaun Johnston <shaun@snj.au>
 */
final readonly class OllamaMessageChunk
{
    /**
     * Note: the per-property `readonly` modifiers are omitted on purpose —
     * they are redundant inside a `readonly class`, which already makes
     * every declared property readonly.
     *
     * @param string               $model      name of the model that produced this chunk
     * @param \DateTimeImmutable   $created_at timestamp reported by the server for this chunk
     * @param array<string, mixed> $message    partial message payload (typically "role" and "content" keys)
     * @param bool                 $done       true when this is the final chunk of the stream
     */
    public function __construct(
        public string $model,
        public \DateTimeImmutable $created_at,
        public array $message,
        public bool $done,
    ) {
    }

    public function __toString(): string
    {
        // Return the assistant's message content if available, falling back
        // to an empty string so a chunk can always be safely interpolated.
        return $this->message['content'] ?? '';
    }

    /**
     * Returns the textual content of this chunk, or null when the payload
     * carries no "content" key.
     */
    public function getContent(): ?string
    {
        return $this->message['content'] ?? null;
    }

    /**
     * Returns the message role (e.g. "assistant"), or null when absent.
     */
    public function getRole(): ?string
    {
        return $this->message['role'] ?? null;
    }
}
31 changes: 31 additions & 0 deletions src/platform/src/Bridge/Ollama/OllamaResultConverter.php
Original file line number Diff line number Diff line change
Expand Up @@ -15,12 +15,17 @@
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\Result\RawResultInterface;
use Symfony\AI\Platform\Result\ResultInterface;
use Symfony\AI\Platform\Result\StreamResult;
use Symfony\AI\Platform\Result\TextResult;
use Symfony\AI\Platform\Result\ToolCall;
use Symfony\AI\Platform\Result\ToolCallResult;
use Symfony\AI\Platform\Result\VectorResult;
use Symfony\AI\Platform\ResultConverterInterface;
use Symfony\AI\Platform\Vector\Vector;
use Symfony\Component\HttpClient\Chunk\FirstChunk;
use Symfony\Component\HttpClient\Chunk\LastChunk;
use Symfony\Component\HttpClient\EventSourceHttpClient;
use Symfony\Contracts\HttpClient\ResponseInterface;

/**
* @author Christopher Hertel <mail@christopher-hertel.de>
Expand All @@ -34,6 +39,10 @@ public function supports(Model $model): bool

public function convert(RawResultInterface $result, array $options = []): ResultInterface
{
if ($options['stream'] ?? false) {
return new StreamResult($this->convertStream($result->getObject()));
}

$data = $result->getData();

return \array_key_exists('embeddings', $data)
Expand Down Expand Up @@ -83,4 +92,26 @@ public function doConvertEmbeddings(array $data): ResultInterface
),
);
}

// Lazily converts a streamed Ollama HTTP response into OllamaMessageChunk
// objects, one per server-sent payload.
// NOTE(review): a fresh EventSourceHttpClient is instantiated here to stream
// a response produced elsewhere — confirm this is supported, since Symfony
// HTTP clients normally only stream responses they created themselves.
private function convertStream(ResponseInterface $result): \Generator
{
foreach ((new EventSourceHttpClient())->stream($result) as $chunk) {
// Framing chunks carry no payload; only intermediate chunks hold JSON.
if ($chunk instanceof FirstChunk || $chunk instanceof LastChunk) {
continue;
}

try {
// Each streamed chunk body is expected to be a single JSON document.
$data = json_decode($chunk->getContent(), true, 512, \JSON_THROW_ON_ERROR);
} catch (\JsonException $e) {
throw new RuntimeException('Failed to decode JSON: '.$e->getMessage());
}

// assumes every payload carries model/created_at/message/done keys —
// TODO confirm; a missing key would trigger an undefined-index error here.
yield new OllamaMessageChunk(
$data['model'],
new \DateTimeImmutable($data['created_at']),
$data['message'],
$data['done'],
);
}
}
}
88 changes: 88 additions & 0 deletions src/platform/tests/Bridge/Ollama/OllamaClientTest.php
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,12 @@
use PHPUnit\Framework\TestCase;
use Symfony\AI\Platform\Bridge\Ollama\Ollama;
use Symfony\AI\Platform\Bridge\Ollama\OllamaClient;
use Symfony\AI\Platform\Bridge\Ollama\OllamaResultConverter;
use Symfony\AI\Platform\Bridge\Ollama\PlatformFactory;
use Symfony\AI\Platform\Model;
use Symfony\AI\Platform\Result\RawHttpResult;
use Symfony\AI\Platform\Result\StreamResult;
use Symfony\Component\HttpClient\MockHttpClient;
use Symfony\Component\HttpClient\Response\JsonMockResponse;
use Symfony\Component\HttpClient\Response\MockResponse;

#[CoversClass(OllamaClient::class)]
#[UsesClass(Ollama::class)]
Expand Down Expand Up @@ -87,4 +90,89 @@ public function testOutputStructureIsSupported()
'done' => true,
], $response->getData());
}

/**
 * Invoking the platform with the "stream" option must produce a
 * StreamResult backed by a generator, after the capability-check request.
 */
public function testStreamingIsSupported()
{
    $payload = json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:00Z',
        'message' => ['role' => 'assistant', 'content' => 'Hello world'],
        'done' => true,
    ]);

    $responses = [
        // First request: model capability discovery.
        new JsonMockResponse(['capabilities' => ['completion']]),
        // Second request: the streamed chat completion.
        new MockResponse('data: '.$payload."\n\n", [
            'response_headers' => [
                'content-type' => 'text/event-stream',
            ],
        ]),
    ];
    $httpClient = new MockHttpClient($responses, 'http://127.0.0.1:1234');

    $platform = PlatformFactory::create('http://127.0.0.1:1234', $httpClient);
    $response = $platform->invoke(
        new Ollama(),
        [
            'messages' => [
                [
                    'role' => 'user',
                    'content' => 'Say hello world',
                ],
            ],
            'model' => 'llama3.2',
        ],
        ['stream' => true],
    );

    $result = $response->getResult();

    $this->assertInstanceOf(StreamResult::class, $result);
    $this->assertInstanceOf(\Generator::class, $result->getContent());
    $this->assertSame(2, $httpClient->getRequestsCount());
}

/**
 * Ensures the converter honours the "stream" option: a streaming response
 * is wrapped in a generator-backed StreamResult, while a regular response
 * is converted into a non-streaming result.
 *
 * Inline fully-qualified class names were replaced with imports for
 * consistency with the rest of this file.
 */
public function testStreamingConverterWithDirectResponse()
{
    $streamingData = 'data: '.json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:00Z',
        'message' => ['role' => 'assistant', 'content' => 'Hello'],
        'done' => false,
    ])."\n\n".
    'data: '.json_encode([
        'model' => 'llama3.2',
        'created_at' => '2025-08-23T10:00:01Z',
        'message' => ['role' => 'assistant', 'content' => ' world'],
        'done' => true,
    ])."\n\n";

    $streamingHttpClient = new MockHttpClient([
        new MockResponse($streamingData, [
            'response_headers' => [
                'content-type' => 'text/event-stream',
            ],
        ]),
    ]);

    $converter = new OllamaResultConverter();

    // Streaming path: the converter must defer to a generator.
    $streamingResponse = $streamingHttpClient->request('GET', 'http://test.example');
    $result = $converter->convert(new RawHttpResult($streamingResponse), ['stream' => true]);

    $this->assertInstanceOf(StreamResult::class, $result);
    $this->assertInstanceOf(\Generator::class, $result->getContent());

    // Non-streaming path: the same converter must produce a regular result.
    $regularHttpClient = new MockHttpClient([
        new JsonMockResponse([
            'model' => 'llama3.2',
            'message' => ['role' => 'assistant', 'content' => 'Hello world'],
            'done' => true,
        ]),
    ]);

    $regularResponse = $regularHttpClient->request('GET', 'http://test.example');
    $regularResult = $converter->convert(new RawHttpResult($regularResponse), ['stream' => false]);

    $this->assertNotInstanceOf(StreamResult::class, $regularResult);
}
}