From b5cc98d0fba3930d4e85691d35b43c4e4658cf37 Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Tue, 12 Nov 2024 14:12:14 -0500
Subject: [PATCH 1/8] feat(genai): add prompt template example

---
 generative_ai/prompts/prompt_template.py | 58 ++++++++++++++++++++++++
 1 file changed, 58 insertions(+)
 create mode 100644 generative_ai/prompts/prompt_template.py

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
new file mode 100644
index 00000000000..0e31cf4b57a
--- /dev/null
+++ b/generative_ai/prompts/prompt_template.py
@@ -0,0 +1,58 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+
+
+PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")
+
+
+def prompt_template() -> str:
+    """Create prompt template"""
+
+    # [START generativeaionvertexai_prompt_template]
+    import vertexai
+    from vertexai.preview.prompts import Prompt
+
+    # Initialize vertexai
+    vertexai.init(project=PROJECT_ID, location="us-central1")
+
+    variables = [
+        {
+            "animal": ["""Eagels, Coyotes, Squirrels"""],
+            "activity": ["""eat berries, jump, fly"""],
+        },
+    ]
+    prompt = Prompt(
+        prompt_data=["Do {animal}{activity}"],  # Includes placeholders vor vars
+        model_name="gemini-1.5-flash-002",  # Model in use
+        variables=variables,  # Lists variables defined above
+        system_instruction=["You are a helpful zoolgist"]  # passes instructions for the llm
+        # optional - generation_config=generation_config,
+        # optional - safety_settings=safety_settings,
+    )
+    # Generates content using the assembled prompt.
+    responses = prompt.generate_content(
+        contents=prompt.assemble_contents(**prompt.variables[0]),
+        stream=True,
+    )
+
+    for response in responses:
+        print(response.text, end="")
+
+    # [END generativeaionvertexai_prompt_template]
+    return responses
+
+
+if __name__ == "__main__":
+    prompt_template()

From ace151a71becc21871cd620b5eed124ec4f3379c Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Tue, 12 Nov 2024 16:13:22 -0500
Subject: [PATCH 2/8] feat(genai): fix spelling, restructure comments, fix syntax

---
 generative_ai/prompts/prompt_template.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index 0e31cf4b57a..b62f15196de 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -29,17 +29,17 @@ def prompt_template() -> str:

     variables = [
         {
-            "animal": ["""Eagels, Coyotes, Squirrels"""],
+            "animal": ["""Eagles, Coyotes, Squirrels"""],
             "activity": ["""eat berries, jump, fly"""],
         },
     ]
     prompt = Prompt(
-        prompt_data=["Do {animal}{activity}"],  # Includes placeholders vor vars
+        prompt_data=["Do {animal}{activity}?"],  # Includes placeholders for vars
         model_name="gemini-1.5-flash-002",  # Model in use
         variables=variables,  # Lists variables defined above
-        system_instruction=["You are a helpful zoolgist"]  # passes instructions for the llm
-        # optional - generation_config=generation_config,
-        # optional - safety_settings=safety_settings,
+        system_instruction=["You are a helpful zoolgist"]
+        # generation_config=generation_config,  # Optional
+        # safety_settings=safety_settings,  # Optional
     )
     # Generates content using the assembled prompt.
     responses = prompt.generate_content(

From c3d11b6c0bc498983a13f8525117def471ba1ea1 Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Tue, 12 Nov 2024 18:06:55 -0500
Subject: [PATCH 3/8] feat(genai): update comment, restructure variables and prompt assembly and fix syntax

---
 generative_ai/prompts/prompt_template.py | 32 +++++++++++++-----------
 1 file changed, 17 insertions(+), 15 deletions(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index b62f15196de..43840b48a4c 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -13,12 +13,11 @@
 # limitations under the License.
 import os

-
 PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")


-def prompt_template() -> str:
-    """Create prompt template"""
+def prompt_template() -> list:
+    """Build a parameterized prompt template to generate content with multiple variable sets"""

     # [START generativeaionvertexai_prompt_template]
     import vertexai
@@ -28,24 +27,27 @@ def prompt_template() -> str:
     vertexai.init(project=PROJECT_ID, location="us-central1")

     variables = [
-        {
-            "animal": ["""Eagles, Coyotes, Squirrels"""],
-            "activity": ["""eat berries, jump, fly"""],
-        },
+        {"animal": "Eagles", "activity": "eat berries"},
+        {"animal": "Coyotes", "activity": "jump"},
+        {"animal": "Squirrels", "activity": "fly"}
     ]
+
     prompt = Prompt(
-        prompt_data=["Do {animal}{activity}?"],  # Includes placeholders for vars
-        model_name="gemini-1.5-flash-002",  # Model in use
-        variables=variables,  # Lists variables defined above
-        system_instruction=["You are a helpful zoolgist"]
+        prompt_data="Do {animal} {activity}?",
+        model_name="gemini-1.5-flash-002",
+        variables=variables,
+        system_instruction=["You are a helpful zoologist"]
         # generation_config=generation_config,  # Optional
         # safety_settings=safety_settings,  # Optional
     )
+
     # Generates content using the assembled prompt.
-    responses = prompt.generate_content(
-        contents=prompt.assemble_contents(**prompt.variables[0]),
-        stream=True,
-    )
+    responses = []
+    for i in range(len(prompt.variables)):
+        response = prompt.generate_content(
+            contents=prompt.assemble_contents(**prompt.variables[i])
+        )
+        responses.append(response)

     for response in responses:
         print(response.text, end="")

From b843d5a74a15b12e73a958a34c0ccf95c3971b89 Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Wed, 13 Nov 2024 07:54:28 -0500
Subject: [PATCH 4/8] feat(genai): update comment and fix syntax

---
 generative_ai/prompts/prompt_template.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index 43840b48a4c..153078ce174 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -32,6 +32,7 @@ def prompt_template() -> list:
         {"animal": "Squirrels", "activity": "fly"}
     ]

+    # define prompt template
     prompt = Prompt(
         prompt_data="Do {animal} {activity}?",
         model_name="gemini-1.5-flash-002",
@@ -43,12 +44,13 @@ def prompt_template() -> list:
     # Generates content using the assembled prompt.
     responses = []
-    for i in range(len(prompt.variables)):
+    for variable_set in prompt.variables:
         response = prompt.generate_content(
-            contents=prompt.assemble_contents(**prompt.variables[i])
+            contents=prompt.assemble_contents(**variable_set)
         )
         responses.append(response)

+    # Example response
     for response in responses:
         print(response.text, end="")

From 4fbd473126597672869c9ac84698876549063633 Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Wed, 13 Nov 2024 09:18:09 -0500
Subject: [PATCH 5/8] feat(genai): update example comment

---
 generative_ai/prompts/prompt_template.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index 153078ce174..11e0fbe29a7 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -50,10 +50,14 @@ def prompt_template() -> list:
         )
         responses.append(response)

-    # Example response
     for response in responses:
         print(response.text, end="")

+    # Example response
+    # Assembled prompt replacing: 1 instances of variable animal, 1 instances of variable activity
+    # WARNING: All log messages before absl::InitializeLog() is called are written to STDERR
+    # I0000 00:00:1731452465.383223 241384 config.cc:230] gRPC experiments enabled .....
+    # No, eagles are carnivores. Their diet primarily consists of fish, small mammals, reptiles, and other birds......
     # [END generativeaionvertexai_prompt_template]
     return responses

From a206183f3adaa60e5ede4e82188bfa5eaf06584e Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Wed, 13 Nov 2024 18:51:57 -0500
Subject: [PATCH 6/8] feat(genai): renaming def / adding testing file

---
 generative_ai/prompts/prompt_template.py      |  4 ++--
 generative_ai/prompts/test_prompt_template.py | 20 +++++++++++++++++++
 2 files changed, 22 insertions(+), 2 deletions(-)
 create mode 100644 generative_ai/prompts/test_prompt_template.py

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index 11e0fbe29a7..b1f8d990609 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -16,7 +16,7 @@
 PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")


-def prompt_template() -> list:
+def prompt_template_example() -> list:
     """Build a parameterized prompt template to generate content with multiple variable sets"""

     # [START generativeaionvertexai_prompt_template]
@@ -63,4 +63,4 @@ def prompt_template() -> list:


 if __name__ == "__main__":
-    prompt_template()
+    prompt_template_example()
diff --git a/generative_ai/prompts/test_prompt_template.py b/generative_ai/prompts/test_prompt_template.py
new file mode 100644
index 00000000000..ad9ec17b39e
--- /dev/null
+++ b/generative_ai/prompts/test_prompt_template.py
@@ -0,0 +1,20 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import prompt_template
+
+
+def test_prompt_template() -> None:
+    text = prompt_template.prompt_template_example()
+    assert len(text) > 0

From c153b73dc726f40c1cc5f83df927d421dcbf918a Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Thu, 14 Nov 2024 17:31:23 -0500
Subject: [PATCH 7/8] feat(genai): updating commenting and logic on sample

---
 generative_ai/prompts/prompt_template.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index b1f8d990609..fc8bdf037e3 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -13,10 +13,13 @@
 # limitations under the License.
 import os

+from vertexai.generative_models import GenerationResponse
+
+
 PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")


-def prompt_template_example() -> list:
+def prompt_template_example() -> GenerationResponse:
     """Build a parameterized prompt template to generate content with multiple variable sets"""

     # [START generativeaionvertexai_prompt_template]
@@ -37,7 +40,7 @@ def prompt_template_example() -> GenerationResponse:
         prompt_data="Do {animal} {activity}?",
         model_name="gemini-1.5-flash-002",
         variables=variables,
-        system_instruction=["You are a helpful zoologist"]
+        system_instruction="You are a helpful zoologist"
         # generation_config=generation_config,  # Optional
         # safety_settings=safety_settings,  # Optional
     )
@@ -55,9 +58,7 @@ def prompt_template_example() -> GenerationResponse:
     # Example response
     # Assembled prompt replacing: 1 instances of variable animal, 1 instances of variable activity
-    # WARNING: All log messages before absl::InitializeLog() is called are written to STDERR
-    # I0000 00:00:1731452465.383223 241384 config.cc:230] gRPC experiments enabled .....
-    # No, eagles are carnivores. Their diet primarily consists of fish, small mammals, reptiles, and other birds......
+    # Eagles are primarily carnivorous. While they might *accidentally* ingest a berry......
     # [END generativeaionvertexai_prompt_template]
     return responses

From af0e8a6c908457269cfc008a813ffac1750e6034 Mon Sep 17 00:00:00 2001
From: EmanuelB25
Date: Thu, 14 Nov 2024 17:59:57 -0500
Subject: [PATCH 8/8] feat(genai): fixing list type and updating list length based on number of items in list to be more specific

---
 generative_ai/prompts/prompt_template.py      | 2 +-
 generative_ai/prompts/test_prompt_template.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/generative_ai/prompts/prompt_template.py b/generative_ai/prompts/prompt_template.py
index fc8bdf037e3..cc253aa02a8 100644
--- a/generative_ai/prompts/prompt_template.py
+++ b/generative_ai/prompts/prompt_template.py
@@ -19,7 +19,7 @@
 PROJECT_ID = os.getenv("GOOGLE_CLOUD_PROJECT")


-def prompt_template_example() -> GenerationResponse:
+def prompt_template_example() -> list[GenerationResponse]:
     """Build a parameterized prompt template to generate content with multiple variable sets"""

     # [START generativeaionvertexai_prompt_template]
diff --git a/generative_ai/prompts/test_prompt_template.py b/generative_ai/prompts/test_prompt_template.py
index ad9ec17b39e..4af772cbf07 100644
--- a/generative_ai/prompts/test_prompt_template.py
+++ b/generative_ai/prompts/test_prompt_template.py
@@ -17,4 +17,4 @@

 def test_prompt_template() -> None:
     text = prompt_template.prompt_template_example()
-    assert len(text) > 0
+    assert len(text) > 2
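
Note (not part of the patches above): a minimal sketch of trying the finished sample locally, assuming the two files from these patches sit together under generative_ai/prompts/, the google-cloud-aiplatform SDK is installed, and application-default credentials are available. The "your-project-id" value is a placeholder, and GOOGLE_CLOUD_PROJECT must be set before prompt_template is imported, because PROJECT_ID is read at module import time.

    # Sketch only: exercise the final version of the sample after applying the patches.
    import os

    # Placeholder project ID -- replace with a real project that has Vertex AI enabled.
    os.environ.setdefault("GOOGLE_CLOUD_PROJECT", "your-project-id")

    # Import after the environment variable is set, since prompt_template.py
    # reads GOOGLE_CLOUD_PROJECT when the module is first imported.
    import prompt_template

    responses = prompt_template.prompt_template_example()
    print(f"received {len(responses)} responses")  # one response per variable set, so 3

The accompanying test can then be run from the same directory with pytest test_prompt_template.py; per PATCH 8 it only asserts that more than two responses come back, which matches the three variable sets.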