
Remove Support for OpenAI < 1 #788

Merged · 20 commits · Jun 18, 2024
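For context, here is a minimal sketch (not from this PR) of the streaming-chunk shape that openai>=1 yields, with the dropped openai<1 dict-style access shown only in comments. The model name and prompt are placeholders, and an `OPENAI_API_KEY` is assumed to be set in the environment:

```python
from openai import OpenAI  # requires openai>=1

client = OpenAI()

stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)
for chunk in stream:
    # openai>=1 yields typed objects, so fields are read by attribute access.
    content = chunk.choices[0].delta.content
    # openai<1 yielded plain dicts; the equivalent lookup was:
    #   chunk["choices"][0]["delta"].get("content")
    if content:
        print(content, end="")
```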
31 changes: 8 additions & 23 deletions guardrails/run/async_stream_runner.py

@@ -26,7 +26,6 @@
 from guardrails.prompt import Instructions, Prompt
 from guardrails.run import StreamRunner
 from guardrails.run.async_runner import AsyncRunner
-from guardrails.utils.openai_utils import OPENAI_VERSION
 
 
 class AsyncStreamRunner(AsyncRunner, StreamRunner):
@@ -212,29 +211,15 @@ def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
"""Get the text from a chunk."""
chunk_text = ""
if isinstance(api, OpenAICallable):
if OPENAI_VERSION.startswith("0"):
finished = chunk["choices"][0]["finish_reason"]
if "text" in chunk["choices"][0]:
content = chunk["choices"][0]["text"]
if not finished and content:
chunk_text = content
else:
finished = chunk.choices[0].finish_reason
content = chunk.choices[0].text
if not finished and content:
chunk_text = content
finished = chunk.choices[0].finish_reason
content = chunk.choices[0].text
if not finished and content:
chunk_text = content
elif isinstance(api, OpenAIChatCallable):
if OPENAI_VERSION.startswith("0"):
finished = chunk["choices"][0]["finish_reason"]
if "content" in chunk["choices"][0]["delta"]:
content = chunk["choices"][0]["delta"]["content"]
if not finished and content:
chunk_text = content
else:
finished = chunk.choices[0].finish_reason
content = chunk.choices[0].delta.content
if not finished and content:
chunk_text = content
finished = chunk.choices[0].finish_reason
content = chunk.choices[0].delta.content
if not finished and content:
chunk_text = content
elif isinstance(api, LiteLLMCallable):
finished = chunk.choices[0].finish_reason
content = chunk.choices[0].delta.content
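With the version branches gone, the method expects only attribute-style chunks. A minimal sketch of how one might unit-test it without a live API call; `make_chunk` and its field values are hypothetical test helpers, not part of this PR:

```python
from types import SimpleNamespace

def make_chunk(text=None, finish_reason=None):
    """Fake an openai>=1-style streaming chunk with attribute access."""
    choice = SimpleNamespace(
        finish_reason=finish_reason,
        text=text,                            # completions-style field
        delta=SimpleNamespace(content=text),  # chat-style field
    )
    return SimpleNamespace(choices=[choice])

assert make_chunk(text="hi").choices[0].delta.content == "hi"
assert make_chunk(finish_reason="stop").choices[0].finish_reason == "stop"
```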
54 changes: 8 additions & 46 deletions guardrails/run/stream_runner.py

@@ -11,7 +11,6 @@
 )
 from guardrails.prompt import Instructions, Prompt
 from guardrails.run.runner import Runner
-from guardrails.utils.openai_utils import OPENAI_VERSION
 from guardrails.utils.parsing_utils import (
     coerce_types,
     parse_llm_output,
@@ -272,58 +271,21 @@ def step(

     def is_last_chunk(self, chunk: Any, api: Union[PromptCallableBase, None]) -> bool:
         """Detect if chunk is final chunk."""
-        if isinstance(api, OpenAICallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                return finished is not None
-            else:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-        elif isinstance(api, OpenAIChatCallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                return finished is not None
-            else:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-        elif isinstance(api, LiteLLMCallable):
-            finished = chunk.choices[0].finish_reason
-            return finished is not None
-        else:
-            try:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-            except (AttributeError, TypeError):
-                return False
+        try:
+            finished = chunk.choices[0].finish_reason
+            return finished is not None
+        except (AttributeError, TypeError):
+            return False
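The collapsed method relies on duck typing rather than isinstance checks. A sketch of why one try/except covers every provider, under the assumption that all supported clients expose `choices[0].finish_reason`; the toy objects are illustrative only:

```python
from types import SimpleNamespace

def is_done(chunk) -> bool:
    try:
        return chunk.choices[0].finish_reason is not None
    except (AttributeError, TypeError):
        return False

done = SimpleNamespace(choices=[SimpleNamespace(finish_reason="stop")])
mid_stream = SimpleNamespace(choices=[SimpleNamespace(finish_reason=None)])

assert is_done(done) is True
assert is_done(mid_stream) is False
assert is_done("not a chunk") is False  # no .choices, falls to the except branch
```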

     def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
         """Get the text from a chunk."""
         chunk_text = ""
         if isinstance(api, OpenAICallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "text" in chunk["choices"][0]:
-                    content = chunk["choices"][0]["text"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].text
-                if not finished and content:
-                    chunk_text = content
+            finished = chunk.choices[0].finish_reason
+            content = chunk.choices[0].text
+            if not finished and content:
+                chunk_text = content
         elif isinstance(api, OpenAIChatCallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "content" in chunk["choices"][0]["delta"]:
-                    content = chunk["choices"][0]["delta"]["content"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].delta.content
-                if not finished and content:
-                    chunk_text = content
-        elif isinstance(api, LiteLLMCallable):
-            finished = chunk.choices[0].finish_reason
-            content = chunk.choices[0].delta.content
+        elif isinstance(api, OpenAIChatCallable) or isinstance(api, LiteLLMCallable):
+            finished = chunk.choices[0].finish_reason
+            content = chunk.choices[0].delta.content
+            if not finished and content:
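LiteLLM can share the chat branch because it normalizes provider streams to the OpenAI chat-chunk shape. A hedged sketch, assuming `litellm` is installed and a provider key is configured; the model name is a placeholder:

```python
import litellm

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
)
for chunk in response:
    content = chunk.choices[0].delta.content  # same shape as openai>=1 chunks
    if content:
        print(content, end="")
```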
35 changes: 9 additions & 26 deletions guardrails/utils/openai_utils/__init__.py

@@ -1,31 +1,14 @@
-from openai.version import VERSION
-
-OPENAI_VERSION = VERSION
-
-if OPENAI_VERSION.startswith("0"):
-    from .v0 import AsyncOpenAIClientV0 as AsyncOpenAIClient
-    from .v0 import OpenAIClientV0 as OpenAIClient
-    from .v0 import (
-        OpenAIServiceUnavailableError,
-        get_static_openai_acreate_func,
-        get_static_openai_chat_acreate_func,
-        get_static_openai_chat_create_func,
-        get_static_openai_create_func,
-    )
-else:
-    from .v1 import AsyncOpenAIClientV1 as AsyncOpenAIClient
-    from .v1 import OpenAIClientV1 as OpenAIClient
-    from .v1 import (
-        OpenAIServiceUnavailableError,
-        get_static_openai_acreate_func,
-        get_static_openai_chat_acreate_func,
-        get_static_openai_chat_create_func,
-        get_static_openai_create_func,
-    )
+from .v1 import AsyncOpenAIClientV1 as AsyncOpenAIClient
+from .v1 import OpenAIClientV1 as OpenAIClient
+from .v1 import (
+    OpenAIServiceUnavailableError,
+    get_static_openai_acreate_func,
+    get_static_openai_chat_acreate_func,
+    get_static_openai_chat_create_func,
+    get_static_openai_create_func,
+)

 __all__ = [
-    "OPENAI_VERSION",
     "AsyncOpenAIClient",
     "OpenAIClient",
     "get_static_openai_create_func",
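After this change the module unconditionally re-exports the v1 wrappers. A small sketch of the observable effect, assuming the repository layout shown in the diff:

```python
from guardrails.utils.openai_utils import AsyncOpenAIClient, OpenAIClient
from guardrails.utils.openai_utils.v1 import AsyncOpenAIClientV1, OpenAIClientV1

# The aliases now always point at the v1 implementations; there is no
# OPENAI_VERSION switch left to select a v0 client.
assert OpenAIClient is OpenAIClientV1
assert AsyncOpenAIClient is AsyncOpenAIClientV1
```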