-
-
Notifications
You must be signed in to change notification settings - Fork 114
Closed
Labels
Platform — Issues & PRs about the AI Platform component; Status: Needs Review
Description
Ollama streaming is not working with tool calls.
I'm using the following configuration:
ai.yaml
ai:
platform:
ollama:
host_url: 'http://172.27.48.1:11434'
agent:
default:
model: 'llama3.2'
tools:
- 'Symfony\AI\Agent\Toolbox\Tool\Clock'
- 'Symfony\AI\Agent\Toolbox\Tool\Wikipedia'
services:
_defaults:
autowire: true
autoconfigure: true
Symfony\AI\Agent\Toolbox\Tool\Clock: ~
Symfony\AI\Agent\Toolbox\Tool\Wikipedia: ~
StreamCommand.php
<?php
namespace App\Command;
use Symfony\Component\Console\Attribute\AsCommand;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\AI\Agent\AgentInterface;
use Symfony\AI\Platform\Message\Message;
use Symfony\AI\Platform\Message\MessageBag;
use Symfony\AI\Platform\Result\StreamResult;
use Symfony\AI\Platform\PlatformInterface;
#[AsCommand(
    name: 'app:stream-chat',
    description: '',
)]
class StreamCommand extends Command
{
    /**
     * @param AgentInterface $agent agent configured with the Clock and Wikipedia tools (see ai.yaml)
     */
    public function __construct(
        private AgentInterface $agent,
    ) {
        parent::__construct();
    }

    /**
     * Sends a fixed system/user prompt to the agent and writes the streamed
     * response chunks to the console as they arrive.
     */
    protected function execute(InputInterface $input, OutputInterface $output): int
    {
        $messages = new MessageBag(
            Message::forSystem('You are a helpful assistant.'),
            Message::ofUser('Tina has one brother and one sister. How many sisters do Tina\'s siblings have?'),
        );

        // Streamed call: getContent() yields chunks we print incrementally.
        $result = $this->agent->call($messages, ['stream' => true]);
        foreach ($result->getContent() as $part) {
            $output->write($part->getContent());
        }

        // Working without stream:
        // $result = $this->agent->call($messages);
        // $output->write($result->getContent());

        return Command::SUCCESS;
    }
}
When using the agent without tools, the streaming of the response is working as expected.
When using the agent with tools, the streaming is not working. I get the following two response objects:
Symfony\AI\Platform\Bridge\Ollama\OllamaMessageChunk Object
(
[model] => llama3.2
[created_at] => DateTimeImmutable Object
(
[date] => 2025-10-23 11:51:50.465004
[timezone_type] => 2
[timezone] => Z
)
[message] => Array
(
[role] => assistant
[content] =>
[tool_calls] => Array
(
[0] => Array
(
[function] => Array
(
[index] => 0
[name] => wikipedia_article
[arguments] => Array
(
[title] => number of siblings
)
)
)
)
)
[done] =>
)
Symfony\AI\Platform\Bridge\Ollama\OllamaMessageChunk Object
(
[model] => llama3.2
[created_at] => DateTimeImmutable Object
(
[date] => 2025-10-23 11:51:50.475651
[timezone_type] => 2
[timezone] => Z
)
[message] => Array
(
[role] => assistant
[content] =>
)
[done] => 1
)
Without streaming enabled, I get the expected output.
I think the tool call doesn't get processed.
Metadata
Metadata
Assignees
Labels
Platform — Issues & PRs about the AI Platform component; Status: Needs Review