diff --git a/pydantic_ai_slim/pydantic_ai/models/function.py b/pydantic_ai_slim/pydantic_ai/models/function.py
index e098e986fb..591c37970d 100644
--- a/pydantic_ai_slim/pydantic_ai/models/function.py
+++ b/pydantic_ai_slim/pydantic_ai/models/function.py
@@ -31,7 +31,7 @@
     UserContent,
     UserPromptPart,
 )
-from ..profiles import ModelProfileSpec
+from ..profiles import ModelProfile, ModelProfileSpec
 from ..settings import ModelSettings
 from ..tools import ToolDefinition
 from . import Model, ModelRequestParameters, StreamedResponse
@@ -111,6 +111,12 @@ def __init__(
         stream_function_name = self.stream_function.__name__ if self.stream_function is not None else ''
         self._model_name = model_name or f'function:{function_name}:{stream_function_name}'
 
+        # Use a default profile that supports JSON schema and object output if none provided
+        if profile is None:
+            profile = ModelProfile(
+                supports_json_schema_output=True,
+                supports_json_object_output=True,
+            )
         super().__init__(settings=settings, profile=profile)
 
     async def request(
diff --git a/tests/test_agent.py b/tests/test_agent.py
index 415b242d0d..61ba806969 100644
--- a/tests/test_agent.py
+++ b/tests/test_agent.py
@@ -1557,7 +1557,7 @@ def return_city_location(messages: list[ModelMessage], _info: AgentInfo) -> Mode
         text = '{"city": "Mexico City", "country": "Mexico"}'
         return ModelResponse(parts=[TextPart(content=text)])
 
-    m = FunctionModel(return_city_location, profile=ModelProfile(supports_json_schema_output=True))
+    m = FunctionModel(return_city_location)
 
     class CityLocation(BaseModel):
         city: str
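
For context, a rough sketch of what the change enables: with FunctionModel defaulting to a ModelProfile that advertises JSON schema and JSON object output support, a FunctionModel can drive native structured output without passing an explicit profile, which is why the test drops the profile argument. The snippet below mirrors the updated test; the Agent construction, run_sync call, and the NativeOutput marker are assumptions about the surrounding pydantic-ai API, not part of this diff.

```python
from pydantic import BaseModel

from pydantic_ai import Agent
from pydantic_ai.messages import ModelMessage, ModelResponse, TextPart
from pydantic_ai.models.function import AgentInfo, FunctionModel
from pydantic_ai.output import NativeOutput  # assumed import path for the native output marker


class CityLocation(BaseModel):
    city: str
    country: str


def return_city_location(messages: list[ModelMessage], _info: AgentInfo) -> ModelResponse:
    # Stub model function: always answer with a fixed JSON payload, as in the test.
    text = '{"city": "Mexico City", "country": "Mexico"}'
    return ModelResponse(parts=[TextPart(content=text)])


# No profile argument needed: the default profile now has
# supports_json_schema_output=True and supports_json_object_output=True.
agent = Agent(FunctionModel(return_city_location), output_type=NativeOutput(CityLocation))
result = agent.run_sync('Where is Mexico City?')
print(result.output)  # CityLocation(city='Mexico City', country='Mexico')
```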