Skip to content

Commit

Permalink
Merge branch 'main' into change-streams-txn-exclusion
Browse files Browse the repository at this point in the history
  • Loading branch information
rahul2393 committed Apr 29, 2024
2 parents cfdb7cf + 6022d19 commit 2f07027
Show file tree
Hide file tree
Showing 3 changed files with 104 additions and 29 deletions.
122 changes: 99 additions & 23 deletions vertexai/function-calling/functioncalling.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,14 @@
// Function calling lets developers create a description of a function in their code, then pass
// that description to a language model in a request.
//
// This function call example involves 3 messages:
// This function call example involves 2 interactions of 3 messages:
// - ask the model to generate a function call request
// - call the Open API service (simulated in this example)
// - ask the model to interpret the function call response
package functioncalling

// [START aiplatform_gemini_function_calling]
// [START aiplatform_gemini_function_calling_chat]
import (
"context"
"encoding/json"
Expand All @@ -32,11 +33,12 @@ import (
"cloud.google.com/go/vertexai/genai"
)

// functionCalls opens a chat session and sends 2 messages to the model:
// - first, to convert a text into a structured function call request
// - second, to convert a structured function call response into natural language
func functionCalls(w io.Writer, prompt, projectID, location, modelName string) error {
// prompt := "What's the weather like in Paris?"
// functionCallsChat opens a chat session and sends 4 messages to the model:
// - convert a first text question into a structured function call request
// - convert the first structured function call response into natural language
// - convert a second text question into a structured function call request
// - convert the second structured function call response into natural language
func functionCallsChat(w io.Writer, projectID, location, modelName string) error {
// location := "us-central1"
// modelName := "gemini-1.0-pro"
ctx := context.Background()
Expand All @@ -49,26 +51,46 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e
model := client.GenerativeModel(modelName)

// Build an OpenAPI schema, in memory
params := &genai.Schema{
paramsProduct := &genai.Schema{
Type: genai.TypeObject,
Properties: map[string]*genai.Schema{
"productName": {
Type: genai.TypeString,
Description: "Product name",
},
},
}
fundeclProductInfo := &genai.FunctionDeclaration{
Name: "getProductSku",
Description: "Get the SKU for a product",
Parameters: paramsProduct,
}
paramsStore := &genai.Schema{
Type: genai.TypeObject,
Properties: map[string]*genai.Schema{
"location": {
Type: genai.TypeString,
Description: "location",
Description: "Location",
},
},
}
fundecl := &genai.FunctionDeclaration{
Name: "getCurrentWeather",
Description: "Get the current weather in a given location",
Parameters: params,
fundeclStoreLocation := &genai.FunctionDeclaration{
Name: "getStoreLocation",
Description: "Get the location of the closest store",
Parameters: paramsStore,
}
model.Tools = []*genai.Tool{
{FunctionDeclarations: []*genai.FunctionDeclaration{fundecl}},
{FunctionDeclarations: []*genai.FunctionDeclaration{
fundeclProductInfo,
fundeclStoreLocation,
}},
}
model.SetTemperature(0.0)

chat := model.StartChat()

// Send a prompt for the first conversation turn that should invoke the getProductSku function
prompt := "Do you have the Pixel 8 Pro in stock?"
fmt.Fprintf(w, "Question: %s\n", prompt)
resp, err := chat.SendMessage(ctx, genai.Text(prompt))
if err != nil {
Expand All @@ -79,27 +101,28 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e
return errors.New("empty response from model")
}

// The model has returned a function call to the declared function `getCurrentWeather`
// with a value for the argument `location`.
jsondata, err := json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "", " ")
// The model has returned a function call to the declared function `getProductSku`
// with a value for the argument `productName`.
jsondata, err := json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "function call generated by the model:\n%s\n\n", string(jsondata))
fmt.Fprintf(w, "function call generated by the model:\n\t%s\n", string(jsondata))

// Create a function call response, to simulate the result of a call to a
// real service
funresp := &genai.FunctionResponse{
Name: "getCurrentWeather",
Name: "getProductSku",
Response: map[string]any{
"currentWeather": "sunny",
"sku": "GA04834-US",
"in_stock": "yes",
},
}
jsondata, err = json.MarshalIndent(funresp, "", " ")
jsondata, err = json.MarshalIndent(funresp, "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "function call response sent to the model:\n%s\n\n", string(jsondata))
fmt.Fprintf(w, "function call response sent to the model:\n\t%s\n\n", string(jsondata))

// And provide the function call response to the model
resp, err = chat.SendMessage(ctx, funresp)
Expand All @@ -113,13 +136,66 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e

// The model has taken the function call response as input, and has
// reformulated the response to the user.
jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "", " ")
jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "Answer generated by the model:\n%s\n", string(jsondata))
fmt.Fprintf(w, "Answer generated by the model:\n\t%s\n\n", string(jsondata))

// Send a prompt for the second conversation turn that should invoke the getStoreLocation function
prompt2 := "Is there a store in Mountain View, CA that I can visit to try it out?"
fmt.Fprintf(w, "Question: %s\n", prompt)

resp, err = chat.SendMessage(ctx, genai.Text(prompt2))
if err != nil {
return err
}
if len(resp.Candidates) == 0 ||
len(resp.Candidates[0].Content.Parts) == 0 {
return errors.New("empty response from model")
}

// The model has returned a function call to the declared function `getStoreLocation`
// with a value for the argument `store`.
jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "function call generated by the model:\n\t%s\n", string(jsondata))

// Create a function call response, to simulate the result of a call to a
// real service
funresp = &genai.FunctionResponse{
Name: "getStoreLocation",
Response: map[string]any{
"store": "2000 N Shoreline Blvd, Mountain View, CA 94043, US",
},
}
jsondata, err = json.MarshalIndent(funresp, "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "function call response sent to the model:\n\t%s\n\n", string(jsondata))

// And provide the function call response to the model
resp, err = chat.SendMessage(ctx, funresp)
if err != nil {
return err
}
if len(resp.Candidates) == 0 ||
len(resp.Candidates[0].Content.Parts) == 0 {
return errors.New("empty response from model")
}

// The model has taken the function call response as input, and has
// reformulated the response to the user.
jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ")
if err != nil {
return fmt.Errorf("json.Marshal: %w", err)
}
fmt.Fprintf(w, "Answer generated by the model:\n\t%s\n\n", string(jsondata))
return nil
}

// [END aiplatform_gemini_function_calling_chat]
// [END aiplatform_gemini_function_calling]
7 changes: 3 additions & 4 deletions vertexai/function-calling/functioncalling_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,16 +21,15 @@ import (
"github.com/GoogleCloudPlatform/golang-samples/internal/testutil"
)

func Test_functionCalls(t *testing.T) {
// Test_functionCallsChat runs the function-calling chat sample end to end
// against the live service and fails the test if any step returns an error.
func Test_functionCallsChat(t *testing.T) {
	tc := testutil.SystemTest(t)

	// The sample's console output is not asserted on; discard it.
	sink := io.Discard
	region := "us-central1"
	model := "gemini-1.0-pro"

	if err := functionCallsChat(sink, tc.ProjectID, region, model); err != nil {
		t.Errorf("Test_functionCallsChat: %v", err.Error())
	}
}
4 changes: 2 additions & 2 deletions vertexai/multimodal-video/multimodalvideo.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,12 +47,12 @@ func generateMultimodalContent(w io.Writer, prompt, video, projectID, location,
model.SetTemperature(0.4)

// Given a video file URL, prepare video file as genai.Part
img := genai.FileData{
part := genai.FileData{
MIMEType: mime.TypeByExtension(filepath.Ext(video)),
FileURI: video,
}

res, err := model.GenerateContent(ctx, img, genai.Text(prompt))
res, err := model.GenerateContent(ctx, part, genai.Text(prompt))
if err != nil {
return fmt.Errorf("unable to generate contents: %v", err)
}
Expand Down

0 comments on commit 2f07027

Please sign in to comment.