Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
interactions:
- request:
body: '{"contents": [{"parts": [{"text": "What''s the weather in San Francisco?"}],
"role": "user"}], "systemInstruction": {"parts": [{"text": "You are a helpful
weather assistant. Use the get_weather tool to answer questions about weather.\n\nYou
are an agent. Your internal name is \"weather_agent\"."}], "role": "user"},
"tools": [{"functionDeclarations": [{"description": "Get the weather for a location.",
"name": "get_weather", "parameters": {"properties": {"location": {"type": "STRING"}},
"required": ["location"], "type": "OBJECT"}}]}], "generationConfig": {}}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '561'
content-type:
- application/json
host:
- generativelanguage.googleapis.com
user-agent:
- google-genai-sdk/1.31.0 gl-python/3.9.21 google-adk/1.14.1 gl-python/3.9.21
x-goog-api-client:
- google-genai-sdk/1.31.0 gl-python/3.9.21 google-adk/1.14.1 gl-python/3.9.21
method: POST
uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent
response:
body:
string: !!binary |
H4sIAAAAAAAC/61SXU+DMBR9768gfR4LMofG100TP4hTyWJijLnChTWWFttuy1z23y1fG0x9ExJS
7jk9596ebonj0BhEwhIwqOmF82IrjrOtviUmhUFhLNCWbLEAZQ7c+tl21paSLkVsmBQT4Ly3ucEF
5GjrNEPztkYwC1R0cEwClelfNluEyxhK+VLiCYRzpUDETMeSHnF35K+/w/r1YEyV5FVfuUyQt2K7
lkBTJphePCLoxju6n+37prDK7mRWKPletu2eDwPf98bBaeD59h2fjND1AtKaV7Z0qSHDEA3YAGA/
LLUieWEi+YFiIpdVAOOz2qiTVw8PGthIA7yPjAY/VPXUejLejbGTsB0fODObcsbo8jnqZGP1e021
Z0Q6R3nc4j+ZBX0v0iRThzVHpZsbkWFuc3L9oeemHPSiEqQKdSGFxuuk5Ez9NITwFqfhav1p9EzG
XzebB4+SHfkGVtQS/hUDAAA=
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Thu, 18 Sep 2025 20:09:51 GMT
Server:
- scaffolding on HTTPServer2
Server-Timing:
- gfet4t7; dur=530
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
- request:
body: '{"contents": [{"parts": [{"text": "What''s the weather in San Francisco?"}],
"role": "user"}, {"parts": [{"functionCall": {"args": {"location": "San Francisco"},
"name": "get_weather"}}], "role": "model"}, {"parts": [{"functionResponse":
{"name": "get_weather", "response": {"location": "San Francisco", "temperature":
"72\u00b0F", "condition": "sunny", "humidity": "45%", "wind": "5 mph NW"}}}],
"role": "user"}], "systemInstruction": {"parts": [{"text": "You are a helpful
weather assistant. Use the get_weather tool to answer questions about weather.\n\nYou
are an agent. Your internal name is \"weather_agent\"."}], "role": "user"},
"tools": [{"functionDeclarations": [{"description": "Get the weather for a location.",
"name": "get_weather", "parameters": {"properties": {"location": {"type": "STRING"}},
"required": ["location"], "type": "OBJECT"}}]}], "generationConfig": {}}'
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate
connection:
- keep-alive
content-length:
- '881'
content-type:
- application/json
host:
- generativelanguage.googleapis.com
user-agent:
- google-genai-sdk/1.31.0 gl-python/3.9.21 google-adk/1.14.1 gl-python/3.9.21
x-goog-api-client:
- google-genai-sdk/1.31.0 gl-python/3.9.21 google-adk/1.14.1 gl-python/3.9.21
method: POST
uri: https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent
response:
body:
string: !!binary |
H4sIAAAAAAAC/61R0U7bQBB891esTuobiS5OHELfqqaRUIEisACprdA2Xsen2nfmbl2IovwT38CX
cefUwaGv9YO12pnbGc1sIgCxRJ2pDJmc+Ajf/QZg0/4DZjSTZg90K7+s0fIbd/dterOnMD2FRyIt
CB4JuSALSsM1alhY1EvllgaUA9dovYZHxQUgMFU1WeTGEpgcjuOX58UQwomiqVSmeB2eTJIP4B0D
h9PKD36XQFUXcHE7/KFFz8h2P/88erNvTUnBW2UyKjv6tiOIXGnliitCZ3SgXaffLsUexT+rM7Oq
rfkVEhjIoZQymU1HUo6n8iSeTpKTWRx14q2saByu6JwYfci4j1L4I1XNqflN+rNp2pBnk51Qr5MD
fNzhbBjLA2g0mh39c9fNvaoq+2X1evQBYOlTbYv6cpeKXkh8aKtLKeqF+d7kfxIbvxOL/paz6+uG
rFO7YlZU+aoG8VAO8hJd0V4UllxttKPTLHDmcX6Op+nXi3v19MDu8j6npPkkRbSNXgFas32N/AIA
AA==
headers:
Alt-Svc:
- h3=":443"; ma=2592000,h3-29=":443"; ma=2592000
Content-Encoding:
- gzip
Content-Type:
- application/json; charset=UTF-8
Date:
- Thu, 18 Sep 2025 20:09:52 GMT
Server:
- scaffolding on HTTPServer2
Server-Timing:
- gfet4t7; dur=612
Transfer-Encoding:
- chunked
Vary:
- Origin
- X-Origin
- Referer
X-Content-Type-Options:
- nosniff
X-Frame-Options:
- SAMEORIGIN
X-XSS-Protection:
- '0'
status:
code: 200
message: OK
version: 1
2 changes: 2 additions & 0 deletions py/src/braintrust/integrations/adk/integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
McpToolPatcher,
RunnerRunSyncPatcher,
ThreadBridgePatcher,
ToolCallAsyncPatcher,
)


Expand All @@ -27,5 +28,6 @@ class ADKIntegration(BaseIntegration):
AgentRunAsyncPatcher,
RunnerRunSyncPatcher,
FlowRunAsyncPatcher,
ToolCallAsyncPatcher,
McpToolPatcher,
)
15 changes: 15 additions & 0 deletions py/src/braintrust/integrations/adk/patchers.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
_mcp_tool_run_async_wrapper_async,
_runner_run_async_wrapper,
_runner_run_wrapper,
_tool_call_async_wrapper,
)


Expand Down Expand Up @@ -89,6 +90,20 @@ class FlowRunAsyncPatcher(CompositeFunctionWrapperPatcher):
sub_patchers = (_FlowRunAsyncSubPatcher, _FlowCallLlmAsyncSubPatcher)


# ---------------------------------------------------------------------------
# Tool patcher
# ---------------------------------------------------------------------------


class ToolCallAsyncPatcher(FunctionWrapperPatcher):
    """Patch ADK's central async tool execution helper for tracing.

    Wraps the private module-level helper ``__call_tool_async`` in
    ``google.adk.flows.llm_flows.functions`` so that tool invocations routed
    through it are recorded as tool spans (MCP tools are skipped by the
    wrapper itself, since they have a dedicated patcher).
    """

    # Registry identifier for this patcher.
    name = "adk.tool.call_async"
    # Module that owns the helper being patched.
    target_module = "google.adk.flows.llm_flows.functions"
    # The leading double underscore is the helper's literal name; it is a
    # module-level function, so no class-based name mangling is involved.
    target_path = "__call_tool_async"
    # Wrapper with the (wrapped, instance, args, kwargs) signature that opens
    # a tool span around each call.
    wrapper = _tool_call_async_wrapper


# ---------------------------------------------------------------------------
# Thread-bridge patchers
# ---------------------------------------------------------------------------
Expand Down
65 changes: 64 additions & 1 deletion py/src/braintrust/integrations/adk/test_adk.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@


ADK_VERSION = tuple(int(x) for x in pkg_version("google-adk").split(".")[:3])
from google.adk.agents import LlmAgent
from google.adk.agents import LlmAgent, ParallelAgent, SequentialAgent
from google.adk.runners import Runner
from google.adk.sessions import InMemorySessionService
from google.genai import types
Expand Down Expand Up @@ -226,6 +226,69 @@ def get_weather(location: str):
assert "72" in response_output, "Response doesn't mention temperature"


@pytest.mark.vcr
@pytest.mark.asyncio
async def test_adk_nested_subagent_tool_calls_are_traced(memory_logger):
    """A tool call made by a leaf agent nested under workflow agents
    (Sequential -> Parallel -> leaf) must produce exactly one tool span.

    Guards against the workflow-agent layers each wrapping the same tool
    invocation and emitting duplicate spans.
    """
    # Sanity: the logger fixture starts with no buffered spans.
    assert not memory_logger.pop()

    def get_weather(location: str):
        """Get the weather for a location."""
        return {
            "location": location,
            "temperature": "72°F",
            "condition": "sunny",
        }

    # Leaf LLM agent that actually owns the tool.
    # NOTE(review): `Agent` is presumably an alias of the imported LlmAgent,
    # bound elsewhere in this module — confirm.
    leaf_agent = Agent(
        name="weather_agent",
        model="gemini-2.0-flash",
        instruction="You are a helpful weather assistant. Use the get_weather tool to answer questions about weather.",
        tools=[get_weather],
    )
    # Nest the leaf two workflow-agent levels deep.
    agent = SequentialAgent(
        name="root_agent",
        sub_agents=[
            ParallelAgent(
                name="parallel_weather_agent",
                sub_agents=[leaf_agent],
            )
        ],
    )

    app_name = "nested_weather_app"
    user_id = "test-user"
    session_id = "test-session-nested"

    session_service = InMemorySessionService()
    await session_service.create_session(app_name=app_name, user_id=user_id, session_id=session_id)

    runner = Runner(agent=agent, app_name=app_name, session_service=session_service)
    user_msg = types.Content(role="user", parts=[types.Part(text="What's the weather in San Francisco?")])

    # Drive the run (LLM calls are replayed from the VCR cassette) and keep
    # only final responses.
    responses = []
    async for event in runner.run_async(user_id=user_id, session_id=session_id, new_message=user_msg):
        if event.is_final_response():
            responses.append(event)

    assert responses
    assert responses[0].content
    response_text = responses[0].content.parts[0].text
    assert "san francisco" in response_text.lower()

    spans = memory_logger.pop()

    # Exactly one tool span despite the nesting — more than one would mean the
    # tool wrapper fired once per enclosing agent layer.
    tool_spans = [row for row in spans if row["span_attributes"]["type"] == "tool"]
    assert len(tool_spans) == 1, (
        f"Expected one tool span, got {[row['span_attributes']['name'] for row in tool_spans]}"
    )

    tool_span = tool_spans[0]
    assert tool_span["span_attributes"]["name"] == "tool [get_weather]"
    assert tool_span["input"]["arguments"] == {"location": "San Francisco"}
    assert tool_span["output"]["location"] == "San Francisco"
    assert tool_span["output"]["temperature"] == "72°F"


@pytest.mark.vcr
@pytest.mark.asyncio
async def test_adk_max_tokens_captures_content(memory_logger):
Expand Down
25 changes: 25 additions & 0 deletions py/src/braintrust/integrations/adk/tracing.py
Original file line number Diff line number Diff line change
Expand Up @@ -548,6 +548,31 @@ async def _trace():
yield event


async def _tool_call_async_wrapper(wrapped: Any, instance: Any, args: Any, kwargs: Any):
    """Record a tool span around ADK's async tool execution helper.

    Extracts the tool object and its call arguments from the wrapped call,
    skips MCP tools (they are traced by a dedicated wrapper), and otherwise
    logs the call's input, output, and any raised error on a TOOL span.
    """
    target = args[0] if args else kwargs.get("tool")
    call_args = args[1] if len(args) > 1 else kwargs.get("args", {})

    # MCP tools already have a dedicated wrapper. Skip here to avoid
    # duplicate tool spans.
    if target is not None:
        owning_module = getattr(target.__class__, "__module__", "")
        if owning_module.startswith("google.adk.tools.mcp_tool"):
            return await wrapped(*args, **kwargs)

    if target is None:
        class_name = None
        display_name = "unknown"
    else:
        class_name = target.__class__.__name__
        display_name = getattr(target, "name", class_name)

    span_input = {"tool_name": display_name, "arguments": bt_safe_deep_copy(call_args)}

    with start_span(
        name=f"tool [{display_name}]",
        type=SpanTypeAttribute.TOOL,
        input=span_input,
        metadata={"tool_class": class_name},
    ) as span:
        try:
            outcome = await wrapped(*args, **kwargs)
        except Exception as exc:
            span.log(error=str(exc))
            raise
        span.log(output=outcome)
        return outcome


async def _mcp_tool_run_async_wrapper_async(wrapped: Any, instance: Any, args: Any, kwargs: Any):
# Extract tool information
tool_name = instance.name
Expand Down
Loading