Skip to content

Commit

Permalink
re-introduce legacy function calling
Browse files — browse the repository at this point in the history
  • Loading branch information
rgbkrk committed Feb 27, 2024
1 parent 42a2878 commit f98458b
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 10 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number | Diff line number | Diff line change
Expand Up @@ -5,6 +5,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [2.0.0]

- Support parallel tool calling by default in `Chat`.
- Legacy support for function calling is available by passing `legacy_function_calling=True` to the `Chat` constructor.

## [1.3.0]

- Support tool call format from `FunctionRegistry`. Enables parallel function calling (note: not in `Chat` yet). https://github.com/rgbkrk/chatlab/pull/122
Expand Down
28 changes: 18 additions & 10 deletions chatlab/chat.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -73,6 +73,7 @@ def __init__(
chat_functions: Optional[List[Callable]] = None,
allow_hallucinated_python: bool = False,
python_hallucination_function: Optional[PythonHallucinationFunction] = None,
legacy_function_calling: bool = False,
):
"""Initialize a Chat with an optional initial context of messages.
Expand All @@ -99,6 +100,8 @@ def __init__(
self.api_key = openai_api_key
self.base_url = base_url

self.legacy_function_calling = legacy_function_calling

if initial_context is None:
initial_context = [] # type: ignore

Expand Down Expand Up @@ -295,28 +298,33 @@ async def submit(self, *messages: Union[ChatCompletionMessageParam, str], stream
base_url=self.base_url,
)

chat_create_kwargs = {
"model": self.model,
"messages": full_messages,
"stream": stream,
"temperature": kwargs.get("temperature", 0),
}

# Due to the strict response typing based on `Literal` typing on `stream`, we have to process these
# two cases separately
if stream:
if self.legacy_function_calling:
chat_create_kwargs.update(self.function_registry.api_manifest())
else:
chat_create_kwargs["tools"] = self.function_registry.tools

streaming_response = await client.chat.completions.create(
model=self.model,
messages=full_messages,
tools=self.function_registry.tools,
stream=True,
temperature=kwargs.get("temperature", 0),
**chat_create_kwargs,
stream=True,
)

self.append(*messages)

finish_reason, function_call_request, tool_arguments = await self.__process_stream(streaming_response)
else:
# TODO: Process tools for non stream
full_response = await client.chat.completions.create(
model=self.model,
messages=full_messages,
tools=self.function_registry.tools,
**chat_create_kwargs,
stream=False,
temperature=kwargs.get("temperature", 0),
)

self.append(*messages)
Expand Down

0 comments on commit f98458b

Please sign in to comment.