Unable to include ToolContext in a tool signature #3090

@nithin-nk

Description

Unable to include ToolContext in a tool signature. I am getting the following error:

2025-10-04 07:48:00,275 - ERROR - fast_api.py:850 - Error in event_generator: Failed to parse the parameter context: google.adk.tools.tool_context.ToolContext of function generate_complete_query for automatic function calling. Automatic function calling works best with simpler function signature schema, consider manually parsing your function declaration for function generate_complete_query.
Traceback (most recent call last):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/cli/fast_api.py", line 839, in event_generator
async for event in runner.run_async(
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/runners.py", line 203, in run_async
async for event in invocation_context.agent.run_async(invocation_context):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/agents/base_agent.py", line 201, in run_async
async for event in self._run_async_impl(ctx):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/agents/llm_agent.py", line 275, in _run_async_impl
async for event in self._llm_flow.run_async(ctx):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/flows/llm_flows/base_llm_flow.py", line 283, in run_async
async for event in self._run_one_step_async(invocation_context):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/flows/llm_flows/base_llm_flow.py", line 303, in _run_one_step_async
async for event in self._preprocess_async(invocation_context, llm_request):
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/flows/llm_flows/base_llm_flow.py", line 346, in _preprocess_async
await tool.process_llm_request(
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/tools/base_tool.py", line 96, in process_llm_request
if (function_declaration := self._get_declaration()) is None:
^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/tools/function_tool.py", line 67, in _get_declaration
build_function_declaration(
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/tools/_automatic_function_calling_util.py", line 236, in build_function_declaration
from_function_with_options(func, variant)
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/tools/_automatic_function_calling_util.py", line 310, in from_function_with_options
schema = _function_parameter_parse_util._parse_schema_from_parameter(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/nithin.anil/Desktop/workspace/llm_planner/.venv/lib/python3.12/site-packages/google/adk/tools/_function_parameter_parse_util.py", line 306, in _parse_schema_from_parameter
raise ValueError(
ValueError: Failed to parse the parameter context: google.adk.tools.tool_context.ToolContext of function generate_complete_query for automatic function calling. Automatic function calling works best with simpler function signature schema, consider manually parsing your function declaration for function generate_complete_query.

from google.adk.agents import LlmAgent
from google.adk.tools.tool_context import ToolContext

from get_source_metadata.agent import root_agent


def generate_complete_query(question: str, source: str, context: ToolContext) -> str:
    """
    Generates a complete query based on the given question, source, and metadata.

    Args:
        question (str): The user's question.
        source (str): The source document or text.
        context (ToolContext): The tool context.

    Returns:
        str: A complete query string.
    """
    metadata = context.state.get("source_metadata", {})
    if "tables" in metadata:
        tables_info = "\n".join(
            f"Table: {table['name']}, Columns: {', '.join(table['columns'])}"
            for table in metadata['tables']
        )
        complete_query = (
            f"Based on the following tables:\n{tables_info}\n"
            f"Generate a SQL query for the question: '{question}'"
        )
    else:
        complete_query = f"Unable to generate a complete query as no metadata is available. Please provide more details about the source."

    context.state["complete_query"] = complete_query

    return complete_query

# Agent that exposes the generate_complete_query function as a tool.
generate_complete_query_agent = LlmAgent(
    name="generate_complete_query",
    description="An agent that can generate a complete query based on user input and metadata of the data source.",
    model="gemini-2.0-flash",
    instruction="Generate the complete query for the given question. The input to this agent is the question, the source, and the metadata information from get_source_metadata.",
    tools=[generate_complete_query],
)

root_agent = generate_complete_query_agent
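
For reference, ADK appears to special-case a context parameter named tool_context: FunctionTool drops a parameter with that exact name when it builds the function declaration, so the model never sees it. With any other name, such as context above, build_function_declaration tries to produce a schema for the ToolContext type itself and raises the ValueError shown in the traceback. A minimal sketch of a signature that should avoid the parse failure, assuming that naming convention and with the query-building logic trimmed down, would look like this:

from google.adk.tools.tool_context import ToolContext


def generate_complete_query(question: str, source: str, tool_context: ToolContext) -> str:
    """Builds the complete query; ADK injects tool_context and hides it from the model."""
    # Read whatever an upstream agent stored under "source_metadata" in session state.
    metadata = tool_context.state.get("source_metadata", {})
    # Placeholder query construction; the real logic would inspect the table metadata as above.
    complete_query = f"Generate a SQL query for: '{question}' (source: {source}, metadata: {metadata})"
    tool_context.state["complete_query"] = complete_query
    return complete_query

With the parameter renamed, the generated declaration should only contain question and source, so automatic function calling no longer has to parse ToolContext.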

Labels

tools [Component]: This issue is related to tools
