From 57548512237ad51f8f22745e3d28d36a544df606 Mon Sep 17 00:00:00 2001
From: Robby
Date: Fri, 9 Feb 2024 17:53:43 -0500
Subject: [PATCH] Fix ollama chunk order

---
 libs/community/langchain_community/chat_models/ollama.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/libs/community/langchain_community/chat_models/ollama.py b/libs/community/langchain_community/chat_models/ollama.py
index 92b5afb52ba3f0..73a194a90f9b78 100644
--- a/libs/community/langchain_community/chat_models/ollama.py
+++ b/libs/community/langchain_community/chat_models/ollama.py
@@ -313,12 +313,12 @@ def _stream(
             for stream_resp in self._create_chat_stream(messages, stop, **kwargs):
                 if stream_resp:
                     chunk = _chat_stream_response_to_chat_generation_chunk(stream_resp)
-                    yield chunk
                     if run_manager:
                         run_manager.on_llm_new_token(
                             chunk.text,
                             verbose=self.verbose,
                         )
+                    yield chunk
         except OllamaEndpointNotFoundError:
             yield from self._legacy_stream(messages, stop, **kwargs)
 
@@ -332,12 +332,12 @@ async def _astream(
         async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):
             if stream_resp:
                 chunk = _chat_stream_response_to_chat_generation_chunk(stream_resp)
-                yield chunk
                 if run_manager:
                     await run_manager.on_llm_new_token(
                         chunk.text,
                         verbose=self.verbose,
                     )
+                yield chunk
 
     @deprecated("0.0.3", alternative="_stream")
     def _legacy_stream(
@@ -351,9 +351,9 @@ def _legacy_stream(
         for stream_resp in self._create_generate_stream(prompt, stop, **kwargs):
             if stream_resp:
                 chunk = _stream_response_to_chat_generation_chunk(stream_resp)
-                yield chunk
                 if run_manager:
                     run_manager.on_llm_new_token(
                         chunk.text,
                         verbose=self.verbose,
                     )
+                yield chunk
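
Note (not part of the patch): the reordering matters because these methods are
generators. With the old order, execution suspended at `yield chunk` before
`on_llm_new_token` ran, so the callback for each token fired only when the
consumer requested the next chunk, and never fired at all for the final chunk
if the consumer stopped iterating early. Moving `yield chunk` after the
callback keeps handlers in step with what the consumer has received. Below is
a minimal sketch of the observable effect; the `TokenRecorder` handler, the
model name, and the prompt are illustrative assumptions, not names from this
diff.

    from langchain_community.chat_models import ChatOllama
    from langchain_core.callbacks import BaseCallbackHandler


    class TokenRecorder(BaseCallbackHandler):
        """Collects each token as the callback manager reports it."""

        def __init__(self) -> None:
            self.tokens: list[str] = []

        def on_llm_new_token(self, token: str, **kwargs) -> None:
            self.tokens.append(token)


    recorder = TokenRecorder()
    llm = ChatOllama(model="llama2", callbacks=[recorder])

    for chunk in llm.stream("Tell me a joke"):
        # With the patched order, the token reached `recorder` before this
        # loop body saw the chunk, so the recorded trace never lags behind
        # the consumer, even if the consumer breaks out of the loop early.
        print(chunk.content, end="", flush=True)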