Skip to content

Commit

Permalink
feat: GenAI - Automatic Function Calling feature
Browse files Browse the repository at this point in the history
The Automatic Function Calling (AFC) feature automates the function calling handling.
AFC reduces manual user steps when using Function Calling.
AFC makes it easy to use native functions (e.g. Python) with the Function Calling feature.
AFC makes it possible for the user to instruct the SDK to let the model call the user-provided functions automatically.

AFC can be enabled when starting a chat.

Usage:
```python
from vertexai.preview.generative_models import GenerativeModel, FunctionCallingConfig, FunctionDeclaration, Tool, AutomaticFunctionCallingResponder

# Some function that the model will be able to call
def get_current_weather(location: str, unit: str = "centigrade"):
    """Gets weather in the specified location.

    Args:
        location: The location for which to get the weather.
        unit: Optional. Temperature unit. Can be Centigrade or Fahrenheit. Defaults to Centigrade.

    Returns:
        The weather information as a dict.
    """
    # Add actual implementation.
    return dict(
        location=location,
        unit=unit,
        weather="Super nice, but maybe a bit hot.",
    )

# AFC feature 1: Automatically creating function declaration and schema from a Python function:
get_current_weather_declaration = FunctionDeclaration.from_func(get_current_weather)

weather_tool = Tool([get_current_weather_declaration])

fc_model = GenerativeModel("gemini-pro", tools=[weather_tool])
# AFC feature 2: Activating the Automatic Function Calling
afc_responder = AutomaticFunctionCallingResponder(
    # Optional:
    max_automatic_function_calls=5,
)
chat = fc_model.start_chat(responder=afc_responder)

# Using the AFC-enabled chat
chat.send_message("What is the weather like in Boston?")
# With AFC, send_message returns the final model answer rather than `function_call` that the user has to handle.
# Notice how the model slightly changes the function response to incorporate it in the final answer.
```

PiperOrigin-RevId: 620189258
  • Loading branch information
Ark-kun authored and Copybara-Service committed Mar 29, 2024
1 parent 5015d25 commit eef84c6
Show file tree
Hide file tree
Showing 5 changed files with 376 additions and 27 deletions.
50 changes: 50 additions & 0 deletions tests/system/vertexai/test_generative_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,25 @@
generative_models as preview_generative_models,
)


# A dummy function for function calling
def get_current_weather(location: str, unit: str = "centigrade"):
    """Gets weather in the specified location.

    Args:
        location: The location for which to get the weather.
        unit: Optional. Temperature unit. Can be Centigrade or Fahrenheit. Defaults to Centigrade.

    Returns:
        The weather information as a dict.
    """
    # Dummy implementation: echoes the inputs alongside a canned forecast.
    return {
        "location": location,
        "unit": unit,
        "weather": "Super nice, but maybe a bit hot.",
    }


_REQUEST_FUNCTION_PARAMETER_SCHEMA_STRUCT = {
"type": "object",
"properties": {
Expand Down Expand Up @@ -320,3 +339,34 @@ def test_generate_content_function_calling(self):
summary = response.candidates[0].content.parts[0].text

assert summary

def test_chat_automatic_function_calling(self):
    """Checks that an AFC-enabled chat auto-executes a model-requested function call."""
    # Derive the function declaration (schema included) from the Python function.
    weather_declaration = generative_models.FunctionDeclaration.from_func(
        get_current_weather
    )
    weather_tool = generative_models.Tool(
        function_declarations=[weather_declaration],
    )

    # Attaching tools at model construction avoids re-sending them per request.
    model = preview_generative_models.GenerativeModel(
        "gemini-1.0-pro",
        tools=[weather_tool],
    )

    responder = preview_generative_models.AutomaticFunctionCallingResponder(
        max_automatic_function_calls=1,
    )
    chat = model.start_chat(responder=responder)

    response = chat.send_message("What is the weather like in Boston?")

    # The final answer is plain text incorporating the function's result.
    assert response.text
    assert "nice" in response.text
    # Expected history: user message, model function_call,
    # function_response, final model answer.
    assert len(chat.history) == 4
    call_part = chat.history[-3].parts[0]
    assert call_part.function_call
    assert call_part.function_call.name == "get_current_weather"
    result_part = chat.history[-2].parts[0]
    assert result_part.function_response
    assert result_part.function_response.name == "get_current_weather"
39 changes: 39 additions & 0 deletions tests/unit/vertexai/test_generative_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -538,6 +538,45 @@ def test_generate_content_grounding_vertex_ai_search_retriever(self):
)
assert response.text

@mock.patch.object(
    target=prediction_service.PredictionServiceClient,
    attribute="generate_content",
    new=mock_generate_content,
)
def test_chat_automatic_function_calling(self):
    """Verifies AFC auto-executes functions and enforces the call limit."""
    generative_models = preview_generative_models
    weather_declaration = generative_models.FunctionDeclaration.from_func(
        get_current_weather
    )
    weather_tool = generative_models.Tool(
        function_declarations=[weather_declaration],
    )

    # Attaching the tool at model construction avoids re-sending it per request.
    model = generative_models.GenerativeModel(
        "gemini-pro",
        tools=[weather_tool],
    )
    responder = generative_models.AutomaticFunctionCallingResponder(
        max_automatic_function_calls=5,
    )
    chat = model.start_chat(responder=responder)

    response1 = chat.send_message("What is the weather like in Boston?")
    assert response1.text.startswith("The weather in Boston is")
    assert "nice" in response1.text
    # Expected history: user message, model function_call,
    # function_response, final model answer.
    assert len(chat.history) == 4
    assert chat.history[-3].parts[0].function_call
    assert chat.history[-2].parts[0].function_response

    # With the limit forced to 0, the responder must refuse to auto-call
    # and surface an error instead.
    responder._max_automatic_function_calls = 0
    chat2 = model.start_chat(responder=responder)
    with pytest.raises(RuntimeError) as err:
        chat2.send_message("What is the weather like in Boston?")
    assert err.match("Exceeded the maximum")


EXPECTED_SCHEMA_FOR_GET_CURRENT_WEATHER = {
"title": "get_current_weather",
Expand Down
46 changes: 46 additions & 0 deletions vertexai/generative_models/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,52 @@ print(chat.send_message(
))
```


#### Automatic Function calling

```python
from vertexai.preview.generative_models import GenerativeModel, Tool, FunctionDeclaration, AutomaticFunctionCallingResponder
# First, create functions that the model can use to answer your questions.
def get_current_weather(location: str, unit: str = "centigrade"):
"""Gets weather in the specified location.
Args:
location: The location for which to get the weather.
unit: Optional. Temperature unit. Can be Centigrade or Fahrenheit. Defaults to Centigrade.
"""
return dict(
location=location,
unit=unit,
weather="Super nice, but maybe a bit hot.",
)
# Infer function schema
get_current_weather_func = FunctionDeclaration.from_func(get_current_weather)
# Tool is a collection of related functions
weather_tool = Tool(
function_declarations=[get_current_weather_func],
)
# Use tools in chat:
model = GenerativeModel(
"gemini-pro",
# You can specify tools when creating a model to avoid having to send them with every request.
tools=[weather_tool],
)
# Activate automatic function calling:
afc_responder = AutomaticFunctionCallingResponder(
# Optional:
max_automatic_function_calls=5,
)
chat = model.start_chat(responder=afc_responder)
# Send a message to the model. The model will respond with a function call.
# The SDK will automatically call the requested function and respond to the model.
# The model will use the function call response to answer the original question.
print(chat.send_message("What is the weather like in Boston?"))
```

## Documentation

You can find complete documentation for the Vertex AI SDKs and the Gemini model in the Google Cloud [documentation](https://cloud.google.com/vertex-ai/docs/generative-ai/learn/overview)
Expand Down
Loading

0 comments on commit eef84c6

Please sign in to comment.