-
Notifications
You must be signed in to change notification settings - Fork 2.3k
Closed
Labels
questionQuestion about using the SDKQuestion about using the SDK
Description
Please read this first
- Have you read the docs?Agents SDK docs YES
- Have you searched for related issues? Others may have had similar requesrs YES
Question
import asyncio
import random
from typing import Any
from pydantic import BaseModel
import asyncio
from dotenv import load_dotenv
from agents import Agent, Runner, AsyncOpenAI, OpenAIChatCompletionsModel, ModelSettings
from agents import set_default_openai_client, set_tracing_disabled
from agents import Agent, RunContextWrapper, RunHooks, Runner, Tool, Usage, function_tool
import os
load_dotenv()
deepseek_api_key = os.getenv('DEEPSEEK_API_KEY')
custom_client = AsyncOpenAI(
api_key=os.getenv('DEEPSEEK_API_KEY'),
base_url="https://api.deepseek.com"
)
set_tracing_disabled(True)
set_default_openai_client(custom_client)
class ExampleHooks(RunHooks):
def __init__(self):
self.event_counter = 0
def _usage_to_str(self, usage: Usage) -> str:
return f"{usage.requests} requests, {usage.input_tokens} input tokens, {usage.output_tokens} output tokens, {usage.total_tokens} total tokens"
async def on_agent_start(self, context: RunContextWrapper, agent: Agent) -> None:
self.event_counter += 1
print(
f"### {self.event_counter}: Agent {agent.name} started. Usage: {self._usage_to_str(context.usage)}"
)
async def on_agent_end(self, context: RunContextWrapper, agent: Agent, output: Any) -> None:
self.event_counter += 1
print(
f"### {self.event_counter}: Agent {agent.name} ended with output {output}. Usage: {self._usage_to_str(context.usage)}"
)
async def on_tool_start(self, context: RunContextWrapper, agent: Agent, tool: Tool) -> None:
self.event_counter += 1
print(
f"### {self.event_counter}: Tool {tool.name} started. Usage: {self._usage_to_str(context.usage)}"
)
async def on_tool_end(
self, context: RunContextWrapper, agent: Agent, tool: Tool, result: str
) -> None:
self.event_counter += 1
print(
f"### {self.event_counter}: Tool {tool.name} ended with result {result}. Usage: {self._usage_to_str(context.usage)}"
)
async def on_handoff(
self, context: RunContextWrapper, from_agent: Agent, to_agent: Agent
) -> None:
self.event_counter += 1
print(
f"### {self.event_counter}: Handoff from {from_agent.name} to {to_agent.name}. Usage: {self._usage_to_str(context.usage)}"
)
hooks = ExampleHooks()
###
@function_tool
def random_number(max: int) -> int:
"""Generate a random number up to the provided max."""
return random.randint(0, max)
@function_tool
def multiply_by_two(x: int) -> int:
"""Return x times two."""
return x * 2
class FinalResult(BaseModel):
number: int
multiply_agent = Agent(
name="Multiply Agent",
instructions="Multiply the number by 2 and then return the final result.",
model=OpenAIChatCompletionsModel(
model="deepseek-chat",
openai_client=custom_client,
),
model_settings=ModelSettings(temperature=0.7),
tools=[multiply_by_two],
output_type=FinalResult,
)
start_agent = Agent(
name="Start Agent",
instructions="Generate a random number. If it's even, stop. If it's odd, hand off to the multipler agent.",
model=OpenAIChatCompletionsModel(
model="deepseek-chat",
openai_client=custom_client,
),
model_settings=ModelSettings(temperature=0.7),
tools=[random_number],
output_type=FinalResult,
handoffs=[multiply_agent],
)
async def main() -> None:
user_input = input("Enter a max number: ")
await Runner.run(
start_agent,
hooks=hooks,
input=f"Generate a random number between 0 and {user_input}.",
)
print("Done!")
if __name__ == "__main__":
asyncio.run(main())
error messages
Enter a max number: 30
### 1: Agent Start Agent started. Usage: 0 requests, 0 input tokens, 0 output tokens, 0 total tokens
Traceback (most recent call last):
File "/Users/mars.yao/workSpace/openai-agents-python/examples/basic/lifecycle_example.py", line 129, in <module>
asyncio.run(main())
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/asyncio/base_events.py", line 654, in run_until_complete
return future.result()
^^^^^^^^^^^^^^^
File "/Users/mars.yao/workSpace/openai-agents-python/examples/basic/lifecycle_example.py", line 119, in main
await Runner.run(
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/agents/run.py", line 210, in run
input_guardrail_results, turn_result = await asyncio.gather(
^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/agents/run.py", line 719, in _run_single_turn
new_response = await cls._get_new_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/agents/run.py", line 862, in _get_new_response
new_response = await model.get_response(
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/agents/models/openai_chatcompletions.py", line 116, in get_response
response = await self._fetch_response(
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/agents/models/openai_chatcompletions.py", line 498, in _fetch_response
ret = await self._get_client().chat.completions.create(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/openai/resources/chat/completions/completions.py", line 2000, in create
return await self._post(
^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/openai/_base_client.py", line 1767, in post
return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/openai/_base_client.py", line 1461, in request
return await self._request(
^^^^^^^^^^^^^^^^^^^^
File "/opt/anaconda3/envs/openai-agents/lib/python3.11/site-packages/openai/_base_client.py", line 1562, in _request
raise self._make_status_error_from_response(err.response) from None
openai.UnprocessableEntityError: Failed to deserialize the JSON body into the target type: response_format: response_format.type `json_schema` is unavailable now at line 1 column 483
how can i fix it
Metadata
Metadata
Assignees
Labels
questionQuestion about using the SDKQuestion about using the SDK