Merge pull request #788 from guardrails-ai/remove_support_for_old_openai
Remove Support for OpenAI < 1
CalebCourier committed Jun 18, 2024
2 parents 972e929 + f572ddc · commit a28d01c
Showing 15 changed files with 223 additions and 881 deletions.
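For orientation, the commit drops the compatibility layer for the pre-1.0 OpenAI SDK. The two SDK generations differ roughly as sketched below; this is a minimal illustration based on the public OpenAI SDK interfaces, with the model name and prompt chosen for the example rather than taken from the PR:

```python
# openai < 1.0 (the support being removed): module-level calls and
# dict-shaped responses -- kept as comments since it no longer runs
# against current SDKs.
#
#   import openai
#   stream = openai.ChatCompletion.create(
#       model="gpt-3.5-turbo",
#       messages=[{"role": "user", "content": "Hello"}],
#       stream=True,
#   )
#   for chunk in stream:
#       text = chunk["choices"][0]["delta"].get("content")

# openai >= 1.0 (the only path guardrails keeps): a client object and
# typed stream chunks read with attribute access.
from openai import OpenAI

client = OpenAI()  # picks up OPENAI_API_KEY from the environment
stream = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)
for chunk in stream:
    text = chunk.choices[0].delta.content  # None on the final chunk
```

This dict-to-attribute shift is why every `chunk["choices"][0]` branch in the diffs below collapses into a single `chunk.choices[0]` path.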
31 changes: 8 additions & 23 deletions guardrails/run/async_stream_runner.py
```diff
@@ -26,7 +26,6 @@
 from guardrails.prompt import Instructions, Prompt
 from guardrails.run import StreamRunner
 from guardrails.run.async_runner import AsyncRunner
-from guardrails.utils.openai_utils import OPENAI_VERSION
 
 
 class AsyncStreamRunner(AsyncRunner, StreamRunner):
@@ -212,29 +211,15 @@ def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
         """Get the text from a chunk."""
         chunk_text = ""
         if isinstance(api, OpenAICallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "text" in chunk["choices"][0]:
-                    content = chunk["choices"][0]["text"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].text
-                if not finished and content:
-                    chunk_text = content
+            finished = chunk.choices[0].finish_reason
+            content = chunk.choices[0].text
+            if not finished and content:
+                chunk_text = content
         elif isinstance(api, OpenAIChatCallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "content" in chunk["choices"][0]["delta"]:
-                    content = chunk["choices"][0]["delta"]["content"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].delta.content
-                if not finished and content:
-                    chunk_text = content
+            finished = chunk.choices[0].finish_reason
+            content = chunk.choices[0].delta.content
+            if not finished and content:
+                chunk_text = content
         elif isinstance(api, LiteLLMCallable):
             finished = chunk.choices[0].finish_reason
             content = chunk.choices[0].delta.content
```
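With only the v1 code path left, `get_chunk_text` no longer needs a version check. A minimal, self-contained sketch of the retained logic, using `SimpleNamespace` stand-ins for v1-style chunks rather than guardrails' own types (`make_chunk` and the assertions are illustrative, not from the repo):

```python
from types import SimpleNamespace

def make_chunk(text, finish_reason=None):
    # Mimics the shape of an openai>=1.0 streaming chunk:
    # .choices[0].delta.content and .choices[0].finish_reason
    delta = SimpleNamespace(content=text)
    choice = SimpleNamespace(delta=delta, finish_reason=finish_reason)
    return SimpleNamespace(choices=[choice])

def get_chunk_text(chunk):
    # The retained v1-only logic: attribute access, no dict fallback.
    chunk_text = ""
    finished = chunk.choices[0].finish_reason
    content = chunk.choices[0].delta.content
    if not finished and content:
        chunk_text = content
    return chunk_text

assert get_chunk_text(make_chunk("Hel")) == "Hel"
assert get_chunk_text(make_chunk(None, finish_reason="stop")) == ""
```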
54 changes: 8 additions & 46 deletions guardrails/run/stream_runner.py
```diff
@@ -11,7 +11,6 @@
 )
 from guardrails.prompt import Instructions, Prompt
 from guardrails.run.runner import Runner
-from guardrails.utils.openai_utils import OPENAI_VERSION
 from guardrails.utils.parsing_utils import (
     coerce_types,
     parse_llm_output,
@@ -272,58 +271,21 @@ def step(
 
     def is_last_chunk(self, chunk: Any, api: Union[PromptCallableBase, None]) -> bool:
         """Detect if chunk is final chunk."""
-        if isinstance(api, OpenAICallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                return finished is not None
-            else:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-        elif isinstance(api, OpenAIChatCallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                return finished is not None
-            else:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-        elif isinstance(api, LiteLLMCallable):
-            try:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-            except (AttributeError, TypeError):
-                return False
-        else:
-            try:
-                finished = chunk.choices[0].finish_reason
-                return finished is not None
-            except (AttributeError, TypeError):
-                return False
+        try:
+            finished = chunk.choices[0].finish_reason
+            return finished is not None
+        except (AttributeError, TypeError):
+            return False
 
     def get_chunk_text(self, chunk: Any, api: Union[PromptCallableBase, None]) -> str:
         """Get the text from a chunk."""
         chunk_text = ""
         if isinstance(api, OpenAICallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "text" in chunk["choices"][0]:
-                    content = chunk["choices"][0]["text"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].text
-                if not finished and content:
-                    chunk_text = content
-        elif isinstance(api, OpenAIChatCallable):
-            if OPENAI_VERSION.startswith("0"):
-                finished = chunk["choices"][0]["finish_reason"]
-                if "content" in chunk["choices"][0]["delta"]:
-                    content = chunk["choices"][0]["delta"]["content"]
-                    if not finished and content:
-                        chunk_text = content
-            else:
-                finished = chunk.choices[0].finish_reason
-                content = chunk.choices[0].delta.content
-                if not finished and content:
-                    chunk_text = content
-        elif isinstance(api, LiteLLMCallable):
+            finished = chunk.choices[0].finish_reason
+            content = chunk.choices[0].text
+            if not finished and content:
+                chunk_text = content
+        elif isinstance(api, OpenAIChatCallable) or isinstance(api, LiteLLMCallable):
             finished = chunk.choices[0].finish_reason
             content = chunk.choices[0].delta.content
             if not finished and content:
```
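The simplified `is_last_chunk` trades isinstance dispatch for duck typing: anything exposing `choices[0].finish_reason` is handled uniformly, and anything else is treated as a non-final chunk. A standalone sketch of that behavior, using hypothetical mock objects rather than real API chunks:

```python
from types import SimpleNamespace

def is_last_chunk(chunk):
    # Duck-typed check: any chunk exposing .choices[0].finish_reason
    # works; anything else is simply "not the last chunk".
    try:
        finished = chunk.choices[0].finish_reason
        return finished is not None
    except (AttributeError, TypeError):
        return False

mid = SimpleNamespace(choices=[SimpleNamespace(finish_reason=None)])
last = SimpleNamespace(choices=[SimpleNamespace(finish_reason="stop")])

assert is_last_chunk(mid) is False
assert is_last_chunk(last) is True
assert is_last_chunk("not a chunk") is False  # str has no .choices
```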
35 changes: 9 additions & 26 deletions guardrails/utils/openai_utils/__init__.py
```diff
@@ -1,31 +1,14 @@
-from openai.version import VERSION
-
-OPENAI_VERSION = VERSION
-
-if OPENAI_VERSION.startswith("0"):
-    from .v0 import AsyncOpenAIClientV0 as AsyncOpenAIClient
-    from .v0 import OpenAIClientV0 as OpenAIClient
-    from .v0 import (
-        OpenAIServiceUnavailableError,
-        get_static_openai_acreate_func,
-        get_static_openai_chat_acreate_func,
-        get_static_openai_chat_create_func,
-        get_static_openai_create_func,
-    )
-else:
-    from .v1 import AsyncOpenAIClientV1 as AsyncOpenAIClient
-    from .v1 import OpenAIClientV1 as OpenAIClient
-    from .v1 import (
-        OpenAIServiceUnavailableError,
-        get_static_openai_acreate_func,
-        get_static_openai_chat_acreate_func,
-        get_static_openai_chat_create_func,
-        get_static_openai_create_func,
-    )
+from .v1 import AsyncOpenAIClientV1 as AsyncOpenAIClient
+from .v1 import OpenAIClientV1 as OpenAIClient
+from .v1 import (
+    OpenAIServiceUnavailableError,
+    get_static_openai_acreate_func,
+    get_static_openai_chat_acreate_func,
+    get_static_openai_chat_create_func,
+    get_static_openai_create_func,
+)
 
 __all__ = [
-    "OPENAI_VERSION",
     "AsyncOpenAIClient",
    "OpenAIClient",
     "get_static_openai_create_func",
```
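Since the module now imports from `.v1` unconditionally, an environment still pinned to `openai` 0.x fails as soon as `guardrails.utils.openai_utils` is imported. A consumer-side guard one might add for a clearer failure message; this check is illustrative only and is not part of the PR:

```python
# Hypothetical pre-flight check (not from this commit): with the v0
# branch gone, guardrails assumes openai>=1.0, so verifying the pin up
# front gives a clearer error than a later ImportError.
import openai

major = int(openai.__version__.split(".")[0])
if major < 1:
    raise RuntimeError(
        f"guardrails requires openai>=1.0, found {openai.__version__}; "
        "run `pip install --upgrade openai`"
    )

from guardrails.utils.openai_utils import OpenAIClient  # v1-backed client
```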