Skip to content

Commit

Permalink
Fix temp file resource leak (#569)
Browse files Browse the repository at this point in the history
The ChatOpenAI class will soon be consolidated, but this commit fixes a
resource leak within it in the meantime.

Also adds a more precise return type annotation on File.create (`File` instead of `Any`).
  • Loading branch information
GitOnUp committed Oct 3, 2023
1 parent b9e32cc commit 47b0eb1
Show file tree
Hide file tree
Showing 2 changed files with 36 additions and 31 deletions.
65 changes: 35 additions & 30 deletions src/steamship/agents/llms/openai.py
Expand Up @@ -91,35 +91,40 @@ def chat(self, messages: List[Block], tools: Optional[List[Tool]], **kwargs) ->
tags=[Tag(kind=TagKind.GENERATION, name=GenerationTag.PROMPT_COMPLETION)],
)

options = {}
if len(tools) > 0:
functions = []
for tool in tools:
functions.append(tool.as_openai_function().dict())
options["functions"] = functions

if "max_tokens" in kwargs:
options["max_tokens"] = kwargs["max_tokens"]

extra = {
AgentLogging.LLM_NAME: "OpenAI",
AgentLogging.IS_MESSAGE: True,
AgentLogging.MESSAGE_TYPE: AgentLogging.PROMPT,
AgentLogging.MESSAGE_AUTHOR: AgentLogging.LLM,
}

if logging.DEBUG >= logging.root.getEffectiveLevel():
extra["messages"] = json.dumps(
"\n".join([f"[{msg.chat_role}] {msg.as_llm_input()}" for msg in messages])
try:
options = {}
if len(tools) > 0:
functions = []
for tool in tools:
functions.append(tool.as_openai_function().dict())
options["functions"] = functions

if "max_tokens" in kwargs:
options["max_tokens"] = kwargs["max_tokens"]

extra = {
AgentLogging.LLM_NAME: "OpenAI",
AgentLogging.IS_MESSAGE: True,
AgentLogging.MESSAGE_TYPE: AgentLogging.PROMPT,
AgentLogging.MESSAGE_AUTHOR: AgentLogging.LLM,
}

if logging.DEBUG >= logging.root.getEffectiveLevel():
extra["messages"] = json.dumps(
"\n".join([f"[{msg.chat_role}] {msg.as_llm_input()}" for msg in messages])
)
extra["tools"] = ",".join([t.name for t in tools])
else:
extra["num_messages"] = len(messages)
extra["num_tools"] = len(tools)

logging.info(f"OpenAI ChatComplete ({messages[-1].as_llm_input()})", extra=extra)

tool_selection_task = self.generator.generate(
input_file_id=temp_file.id, options=options
)
extra["tools"] = ",".join([t.name for t in tools])
else:
extra["num_messages"] = len(messages)
extra["num_tools"] = len(tools)

logging.info(f"OpenAI ChatComplete ({messages[-1].as_llm_input()})", extra=extra)

tool_selection_task = self.generator.generate(input_file_id=temp_file.id, options=options)
tool_selection_task.wait()
tool_selection_task.wait()

return tool_selection_task.output.blocks
return tool_selection_task.output.blocks
finally:
temp_file.delete()
2 changes: 1 addition & 1 deletion src/steamship/data/file.py
Expand Up @@ -121,7 +121,7 @@ def create(
blocks: List[Block] = None,
tags: List[Tag] = None,
public_data: bool = False,
) -> Any:
) -> File:

req = {
"handle": handle,
Expand Down

0 comments on commit 47b0eb1

Please sign in to comment.