You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
"""Vertex AI function-calling example.

Declares two functions as tools, forces the model to call one of them via
ToolConfig ANY mode, and sends a single prompt to Gemini 1.5 Pro.
"""
from vertexai.preview.generative_models import (
    Part,
    Content,
    Tool,
    GenerationConfig,
    GenerativeModel,
    FunctionDeclaration,
    ToolConfig,
)

# Specify a function declaration and parameters for an API request.
get_product_info_func = FunctionDeclaration(
    name="get_product_sku",
    description="Get the available inventory for a Google products, e.g: Pixel phones, Pixel Watches, Google Home etc",
    # Function parameters are specified in OpenAPI JSON schema format.
    parameters={
        "type": "object",
        "properties": {
            "product_name": {"type": "string", "description": "Product name"}
        },
    },
)

# Specify another function declaration and parameters for an API request.
get_store_location_func = FunctionDeclaration(
    name="get_store_location",
    description="Get the location of the closest store",
    # Function parameters are specified in OpenAPI JSON schema format.
    parameters={
        "type": "object",
        "properties": {"location": {"type": "string", "description": "Location"}},
    },
)

# Define a tool that includes the above functions.
retail_tool = Tool(
    function_declarations=[
        get_product_info_func,
        get_store_location_func,
    ],
)

# Define a tool config for the above functions.
retail_tool_config = ToolConfig(
    function_calling_config=ToolConfig.FunctionCallingConfig(
        # ANY mode forces the model to predict a function call.
        mode=ToolConfig.FunctionCallingConfig.Mode.ANY,
        # List of functions that can be returned when the mode is ANY.
        # If the list is empty, any declared function can be returned.
        allowed_function_names=["get_product_sku"],
    )
)

generation_config = GenerationConfig(
    temperature=0.95, top_p=1.0, max_output_tokens=8192
)

gemini_model = GenerativeModel(
    "gemini-1.5-pro-preview-0409",
    generation_config=generation_config,
    tools=[retail_tool],
    tool_config=retail_tool_config,
)

# NOTE(review): with this model version the request below was reported to fail
# with INVALID_ARGUMENT when tool_config is present — confirm against the
# current Vertex AI function-calling docs / supported model versions.
model_response = gemini_model.generate_content(
    "Do you have the White Pixel 8 Pro 128GB in stock in the US?"
)
Stack trace
---------------------------------------------------------------------------
_InactiveRpcError Traceback (most recent call last)
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/google/api_core/grpc_helpers.py:76, in _wrap_unary_errors.<locals>.error_remapped_callable(*args, **kwargs)
75 try:
---> 76 return callable_(*args, **kwargs)
77 except grpc.RpcError as exc:
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/grpc/_channel.py:1181, in _UnaryUnaryMultiCallable.__call__(self, request, timeout, metadata, credentials, wait_for_ready, compression)
1175 (
1176 state,
1177 call,
1178 ) = self._blocking(
1179 request, timeout, metadata, credentials, wait_for_ready, compression
1180 )
-> 1181 return _end_unary_response_blocking(state, call, False, None)
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/grpc/_channel.py:1006, in _end_unary_response_blocking(state, call, with_call, deadline)
1005 else:
-> 1006 raise _InactiveRpcError(state)
_InactiveRpcError: <_InactiveRpcError of RPC that terminated with:
status = StatusCode.INVALID_ARGUMENT
details = "Request contains an invalid argument."
debug_error_string = "UNKNOWN:Error received from peer ipv4:142.251.215.234:443 {grpc_message:"Request contains an invalid argument.", grpc_status:3, created_time:"2024-05-03T10:44:28.065764-07:00"}"
>
The above exception was the direct cause of the following exception:
InvalidArgument Traceback (most recent call last)
Cell In[29], line 55
48 gemini_model = GenerativeModel(
49 "gemini-1.5-pro-preview-0409",
50 generation_config=generation_config,
51 tools=[retail_tool],
52 tool_config=retail_tool_config,
53 )
54 try:
---> 55 model_response = gemini_model.generate_content(
56 "Do you have the White Pixel 8 Pro 128GB in stock in the US?"
57 )
58 except Exception as e:
59 err = e
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/vertexai/generative_models/_generative_models.py:407, in _GenerativeModel.generate_content(self, contents, generation_config, safety_settings, tools, tool_config, stream)
399 return self._generate_content_streaming(
400 contents=contents,
401 generation_config=generation_config,
(...)
404 tool_config=tool_config,
405 )
406 else:
--> 407 return self._generate_content(
408 contents=contents,
409 generation_config=generation_config,
410 safety_settings=safety_settings,
411 tools=tools,
412 tool_config=tool_config,
413 )
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/vertexai/generative_models/_generative_models.py:496, in _GenerativeModel._generate_content(self, contents, generation_config, safety_settings, tools, tool_config)
471 """Generates content.
472
473 Args:
(...)
487 A single GenerationResponse object
488 """
489 request = self._prepare_request(
490 contents=contents,
491 generation_config=generation_config,
(...)
494 tool_config=tool_config,
495 )
--> 496 gapic_response = self._prediction_client.generate_content(request=request)
497 return self._parse_response(gapic_response)
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/google/cloud/aiplatform_v1beta1/services/prediction_service/client.py:2102, in PredictionServiceClient.generate_content(self, request, model, contents, retry, timeout, metadata)
2099 self._validate_universe_domain()
2101 # Send the request.
-> 2102 response = rpc(
2103 request,
2104 retry=retry,
2105 timeout=timeout,
2106 metadata=metadata,
2107 )
2109 # Done; return the response.
2110 return response
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/google/api_core/gapic_v1/method.py:131, in _GapicCallable.__call__(self, timeout, retry, compression, *args, **kwargs)
128 if self._compression is not None:
129 kwargs["compression"] = compression
--> 131 return wrapped_func(*args, **kwargs)
File ~/Repos/blossom-backend/.venv/lib/python3.12/site-packages/google/api_core/grpc_helpers.py:78, in _wrap_unary_errors.<locals>.error_remapped_callable(*args, **kwargs)
76 return callable_(*args, **kwargs)
77 except grpc.RpcError as exc:
---> 78 raise exceptions.from_grpc_error(exc) from exc
InvalidArgument: 400 Request contains an invalid argument.```
The text was updated successfully, but these errors were encountered:
# Define a tool config for the above functions
retail_tool_config = ToolConfig(
    function_calling_config=ToolConfig.FunctionCallingConfig(
        # ANY mode forces the model to predict a function call
        mode=ToolConfig.FunctionCallingConfig.Mode.ANY,
        # List of functions that can be returned when the mode is ANY.
        # If the list is empty, any declared function can be returned.
        allowed_function_names=["get_product_sku"],
    )
)
excluding the tool_config from the request seems to work, but it is not a good workaround for my particular use case.
I'm having trouble running the function-calling example located here:
https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/function-calling#python_2
Environment details
google-cloud-aiplatform
version:
Steps to reproduce
Code example
Stack trace
The text was updated successfully, but these errors were encountered: