Skip to content

Commit

Permalink
fix: GenAI - Fixed response validation error during streaming
Browse files Browse the repository at this point in the history
Previously, when a response was blocked by the safety filters during streaming, the raised exception carried an uninformative message.

PiperOrigin-RevId: 622291778
  • Loading branch information
Ark-kun authored and Copybara-Service committed Apr 5, 2024
1 parent 9bb687c commit c881998
Showing 1 changed file with 8 additions and 9 deletions.
17 changes: 8 additions & 9 deletions vertexai/generative_models/_generative_models.py
Expand Up @@ -1022,17 +1022,17 @@ def _send_message_streaming(
full_response = None
for chunk in stream:
chunks.append(chunk)
if full_response:
_append_response(full_response, chunk)
else:
full_response = chunk
# By default we're not adding incomplete interactions to history.
if self._response_validator is not None:
self._response_validator(
response=chunk,
request_contents=request_history,
response_chunks=chunks,
)
if full_response:
_append_response(full_response, chunk)
else:
full_response = chunk
yield chunk
if not full_response:
return
Expand Down Expand Up @@ -1089,18 +1089,17 @@ async def async_generator():
full_response = None
async for chunk in stream:
chunks.append(chunk)
if full_response:
_append_response(full_response, chunk)
else:
full_response = chunk
# By default we're not adding incomplete interactions to history.
if self._response_validator is not None:
self._response_validator(
response=chunk,
request_contents=request_history,
response_chunks=chunks,
)

if full_response:
_append_response(full_response, chunk)
else:
full_response = chunk
yield chunk
if not full_response:
return
Expand Down

0 comments on commit c881998

Please sign in to comment.