From 8153999de6cb42c7b91964f61f7a88bebb52dca1 Mon Sep 17 00:00:00 2001 From: Valentin Deleplace Date: Thu, 25 Apr 2024 11:24:16 +0200 Subject: [PATCH 1/2] feat(vertexai): Align Function calling chat sample with the Python sample. (#3851) * feat(vertexai): Align Function calling chat sample with the Python sample. * fix(vertexai): more precise func name for Chat modality. * fix(vertexai): 2 Function calling region tags, for a transitional period --------- Co-authored-by: Adam Ross --- vertexai/function-calling/functioncalling.go | 122 ++++++++++++++---- .../function-calling/functioncalling_test.go | 7 +- 2 files changed, 102 insertions(+), 27 deletions(-) diff --git a/vertexai/function-calling/functioncalling.go b/vertexai/function-calling/functioncalling.go index 6f64b0f5e7..62a3399e20 100644 --- a/vertexai/function-calling/functioncalling.go +++ b/vertexai/function-calling/functioncalling.go @@ -15,13 +15,14 @@ // Function calling lets developers create a description of a function in their code, then pass // that description to a language model in a request. // -// This function call example involves 3 messages: +// This function call example involves 2 interactions of 3 messages: // - ask the model to generate a function call request // - call the Open API service (simulated in this example) // - ask the model to interpret the function call response package functioncalling // [START aiplatform_gemini_function_calling] +// [START aiplatform_gemini_function_calling_chat] import ( "context" "encoding/json" @@ -32,11 +33,12 @@ import ( "cloud.google.com/go/vertexai/genai" ) -// functionCalls opens a chat session and sends 2 messages to the model: -// - first, to convert a text into a structured function call request -// - second, to convert a structured function call response into natural language -func functionCalls(w io.Writer, prompt, projectID, location, modelName string) error { - // prompt := "What's the weather like in Paris?" 
+// functionCallsChat opens a chat session and sends 4 messages to the model: +// - convert a first text question into a structured function call request +// - convert the first structured function call response into natural language +// - convert a second text question into a structured function call request +// - convert the second structured function call response into natural language +func functionCallsChat(w io.Writer, projectID, location, modelName string) error { // location := "us-central1" // modelName := "gemini-1.0-pro" ctx := context.Background() @@ -49,26 +51,46 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e model := client.GenerativeModel(modelName) // Build an OpenAPI schema, in memory - params := &genai.Schema{ + paramsProduct := &genai.Schema{ + Type: genai.TypeObject, + Properties: map[string]*genai.Schema{ + "productName": { + Type: genai.TypeString, + Description: "Product name", + }, + }, + } + fundeclProductInfo := &genai.FunctionDeclaration{ + Name: "getProductSku", + Description: "Get the SKU for a product", + Parameters: paramsProduct, + } + paramsStore := &genai.Schema{ Type: genai.TypeObject, Properties: map[string]*genai.Schema{ "location": { Type: genai.TypeString, - Description: "location", + Description: "Location", }, }, } - fundecl := &genai.FunctionDeclaration{ - Name: "getCurrentWeather", - Description: "Get the current weather in a given location", - Parameters: params, + fundeclStoreLocation := &genai.FunctionDeclaration{ + Name: "getStoreLocation", + Description: "Get the location of the closest store", + Parameters: paramsStore, } model.Tools = []*genai.Tool{ - {FunctionDeclarations: []*genai.FunctionDeclaration{fundecl}}, + {FunctionDeclarations: []*genai.FunctionDeclaration{ + fundeclProductInfo, + fundeclStoreLocation, + }}, } + model.SetTemperature(0.0) chat := model.StartChat() + // Send a prompt for the first conversation turn that should invoke the getProductSku function + prompt := 
"Do you have the Pixel 8 Pro in stock?" fmt.Fprintf(w, "Question: %s\n", prompt) resp, err := chat.SendMessage(ctx, genai.Text(prompt)) if err != nil { @@ -79,27 +101,28 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e return errors.New("empty response from model") } - // The model has returned a function call to the declared function `getCurrentWeather` - // with a value for the argument `location`. - jsondata, err := json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "", " ") + // The model has returned a function call to the declared function `getProductSku` + // with a value for the argument `productName`. + jsondata, err := json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ") if err != nil { return fmt.Errorf("json.Marshal: %w", err) } - fmt.Fprintf(w, "function call generated by the model:\n%s\n\n", string(jsondata)) + fmt.Fprintf(w, "function call generated by the model:\n\t%s\n", string(jsondata)) // Create a function call response, to simulate the result of a call to a // real service funresp := &genai.FunctionResponse{ - Name: "getCurrentWeather", + Name: "getProductSku", Response: map[string]any{ - "currentWeather": "sunny", + "sku": "GA04834-US", + "in_stock": "yes", }, } - jsondata, err = json.MarshalIndent(funresp, "", " ") + jsondata, err = json.MarshalIndent(funresp, "\t", " ") if err != nil { return fmt.Errorf("json.Marshal: %w", err) } - fmt.Fprintf(w, "function call response sent to the model:\n%s\n\n", string(jsondata)) + fmt.Fprintf(w, "function call response sent to the model:\n\t%s\n\n", string(jsondata)) // And provide the function call response to the model resp, err = chat.SendMessage(ctx, funresp) @@ -113,13 +136,66 @@ func functionCalls(w io.Writer, prompt, projectID, location, modelName string) e // The model has taken the function call response as input, and has // reformulated the response to the user. 
- jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "", " ") + jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ") if err != nil { return fmt.Errorf("json.Marshal: %w", err) } - fmt.Fprintf(w, "Answer generated by the model:\n%s\n", string(jsondata)) + fmt.Fprintf(w, "Answer generated by the model:\n\t%s\n\n", string(jsondata)) + + // Send a prompt for the second conversation turn that should invoke the getStoreLocation function + prompt2 := "Is there a store in Mountain View, CA that I can visit to try it out?" + fmt.Fprintf(w, "Question: %s\n", prompt2) + resp, err = chat.SendMessage(ctx, genai.Text(prompt2)) + if err != nil { + return err + } + if len(resp.Candidates) == 0 || + len(resp.Candidates[0].Content.Parts) == 0 { + return errors.New("empty response from model") + } + + // The model has returned a function call to the declared function `getStoreLocation` + // with a value for the argument `location`. + jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ") + if err != nil { + return fmt.Errorf("json.Marshal: %w", err) + } + fmt.Fprintf(w, "function call generated by the model:\n\t%s\n", string(jsondata)) + + // Create a function call response, to simulate the result of a call to a + // real service + funresp = &genai.FunctionResponse{ + Name: "getStoreLocation", + Response: map[string]any{ + "store": "2000 N Shoreline Blvd, Mountain View, CA 94043, US", + }, + } + jsondata, err = json.MarshalIndent(funresp, "\t", " ") + if err != nil { + return fmt.Errorf("json.Marshal: %w", err) + } + fmt.Fprintf(w, "function call response sent to the model:\n\t%s\n\n", string(jsondata)) + + // And provide the function call response to the model + resp, err = chat.SendMessage(ctx, funresp) + if err != nil { + return err + } + if len(resp.Candidates) == 0 || + len(resp.Candidates[0].Content.Parts) == 0 { + return errors.New("empty response from model") + } + + // The model has taken the function 
call response as input, and has + // reformulated the response to the user. + jsondata, err = json.MarshalIndent(resp.Candidates[0].Content.Parts[0], "\t", " ") + if err != nil { + return fmt.Errorf("json.Marshal: %w", err) + } + fmt.Fprintf(w, "Answer generated by the model:\n\t%s\n\n", string(jsondata)) return nil } +// [END aiplatform_gemini_function_calling_chat] // [END aiplatform_gemini_function_calling] diff --git a/vertexai/function-calling/functioncalling_test.go b/vertexai/function-calling/functioncalling_test.go index cfe03d0ffb..7461cfdaaf 100644 --- a/vertexai/function-calling/functioncalling_test.go +++ b/vertexai/function-calling/functioncalling_test.go @@ -21,16 +21,15 @@ import ( "github.com/GoogleCloudPlatform/golang-samples/internal/testutil" ) -func Test_functionCalls(t *testing.T) { +func Test_functionCallsChat(t *testing.T) { tc := testutil.SystemTest(t) w := io.Discard - prompt := "What's the weather like in Paris?" location := "us-central1" modelName := "gemini-1.0-pro" - err := functionCalls(w, prompt, tc.ProjectID, location, modelName) + err := functionCallsChat(w, tc.ProjectID, location, modelName) if err != nil { - t.Errorf("Test_functionCalls: %v", err.Error()) + t.Errorf("Test_functionCallsChat: %v", err.Error()) } } From 6022d1977fbaec7d67f0bddebf835eb800fa6ee7 Mon Sep 17 00:00:00 2001 From: Valentin Deleplace Date: Thu, 25 Apr 2024 18:39:25 +0200 Subject: [PATCH 2/2] fix(vertexai): variable name (#4109) --- vertexai/multimodal-video/multimodalvideo.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/vertexai/multimodal-video/multimodalvideo.go b/vertexai/multimodal-video/multimodalvideo.go index a479573ae5..7d7cf2da6d 100644 --- a/vertexai/multimodal-video/multimodalvideo.go +++ b/vertexai/multimodal-video/multimodalvideo.go @@ -47,12 +47,12 @@ func generateMultimodalContent(w io.Writer, prompt, video, projectID, location, model.SetTemperature(0.4) // Given a video file URL, prepare video file as genai.Part - img 
:= genai.FileData{ + part := genai.FileData{ MIMEType: mime.TypeByExtension(filepath.Ext(video)), FileURI: video, } - res, err := model.GenerateContent(ctx, img, genai.Text(prompt)) + res, err := model.GenerateContent(ctx, part, genai.Text(prompt)) if err != nil { return fmt.Errorf("unable to generate contents: %v", err) }