2 changes: 1 addition & 1 deletion genai/batch_prediction/batchpredict_embeddings_with_gcs.py
@@ -34,7 +34,7 @@ def generate_content(output_uri: str) -> str:
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
# Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
# Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING

# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
2 changes: 1 addition & 1 deletion genai/batch_prediction/batchpredict_with_bq.py
@@ -35,7 +35,7 @@ def generate_content(output_uri: str) -> str:
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
# Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
# Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING

# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
2 changes: 1 addition & 1 deletion genai/batch_prediction/batchpredict_with_gcs.py
@@ -36,7 +36,7 @@ def generate_content(output_uri: str) -> str:
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")
# Example response:
# Job name: projects/%PROJECT_ID%/locations/us-central1/batchPredictionJobs/9876453210000000000
# Job name: projects/.../locations/.../batchPredictionJobs/9876453210000000000
# Job state: JOB_STATE_PENDING

# See the documentation: https://googleapis.github.io/python-genai/genai.html#genai.types.BatchJob
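The three batchpredict_* samples above share one create-and-poll flow, which the reworked tests below now mock instead of running against live resources. A minimal sketch of that flow, assuming a placeholder model name, input source, and output destination (each sample supplies its own values):

import time

from google import genai
from google.genai.types import CreateBatchJobConfig, HttpOptions, JobState

client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Placeholder model and input source; each sample uses its own values.
job = client.batches.create(
    model="gemini-2.5-flash",
    src="gs://your-bucket/your-requests.jsonl",
    config=CreateBatchJobConfig(dest="gs://your-bucket/your-output-prefix"),
)
print(f"Job name: {job.name}")
print(f"Job state: {job.state}")

# Poll until the job reaches a terminal state (the tests below patch time.sleep
# so this loop runs instantly under pytest).
completed_states = {
    JobState.JOB_STATE_SUCCEEDED,
    JobState.JOB_STATE_FAILED,
    JobState.JOB_STATE_CANCELLED,
    JobState.JOB_STATE_PAUSED,
}
while job.state not in completed_states:
    time.sleep(30)
    job = client.batches.get(name=job.name)
    print(f"Job state: {job.state}")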
2 changes: 1 addition & 1 deletion genai/batch_prediction/get_batch_job.py
@@ -23,7 +23,7 @@ def get_batch_job(batch_job_name: str) -> types.BatchJob:
client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Get the batch job
# Eg. batch_job_name = "projects/123456789012/locations/us-central1/batchPredictionJobs/1234567890123456789"
# Eg. batch_job_name = "projects/123456789012/locations/.../batchPredictionJobs/1234567890123456789"
batch_job = client.batches.get(name=batch_job_name)

print(f"Job state: {batch_job.state}")
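A small, hypothetical usage example for the sample above; the resource name is a placeholder and would normally be the job name printed when the batch job was created:

import get_batch_job

# Placeholder resource name; substitute the job name returned by batches.create.
batch_job = get_batch_job.get_batch_job(
    "projects/123456789012/locations/us-central1/batchPredictionJobs/1234567890123456789"
)
print(batch_job.state)  # e.g. JOB_STATE_PENDING, JOB_STATE_RUNNING, or JOB_STATE_SUCCEEDED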
2 changes: 0 additions & 2 deletions genai/batch_prediction/requirements-test.txt
@@ -1,4 +1,2 @@
google-api-core==2.24.0
google-cloud-bigquery==3.29.0
google-cloud-storage==2.19.0
pytest==8.2.0
4 changes: 3 additions & 1 deletion genai/batch_prediction/requirements.txt
@@ -1 +1,3 @@
google-genai==1.27.0
google-cloud-bigquery==3.29.0
google-cloud-storage==2.19.0
google-genai==1.42.0
129 changes: 83 additions & 46 deletions genai/batch_prediction/test_batch_prediction_examples.py
@@ -11,87 +11,124 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#
# Using Google Cloud Vertex AI to test the code samples.
#
from datetime import datetime as dt
import os

from unittest.mock import MagicMock, patch

from google.cloud import bigquery, storage
from google.genai import types
from google.genai.types import JobState
import pytest

import batchpredict_embeddings_with_gcs
import batchpredict_with_bq
import batchpredict_with_gcs
import get_batch_job


os.environ["GOOGLE_GENAI_USE_VERTEXAI"] = "True"
os.environ["GOOGLE_CLOUD_LOCATION"] = "us-central1"
# The project name is included in the CICD pipeline
# os.environ['GOOGLE_CLOUD_PROJECT'] = "add-your-project-name"
BQ_OUTPUT_DATASET = f"{os.environ['GOOGLE_CLOUD_PROJECT']}.gen_ai_batch_prediction"
GCS_OUTPUT_BUCKET = "python-docs-samples-tests"


@pytest.fixture(scope="session")
def bq_output_uri() -> str:
table_name = f"text_output_{dt.now().strftime('%Y_%m_%d_T%H_%M_%S')}"
table_uri = f"{BQ_OUTPUT_DATASET}.{table_name}"
@patch("google.genai.Client")
@patch("time.sleep", return_value=None)
def test_batch_prediction_embeddings_with_gcs(
mock_sleep: MagicMock, mock_genai_client: MagicMock
) -> None:
# Mock the API response
mock_batch_job_running = types.BatchJob(
name="test-batch-job", state="JOB_STATE_RUNNING"
)
mock_batch_job_succeeded = types.BatchJob(
name="test-batch-job", state="JOB_STATE_SUCCEEDED"
)

yield f"bq://{table_uri}"
mock_genai_client.return_value.batches.create.return_value = (
mock_batch_job_running
)
mock_genai_client.return_value.batches.get.return_value = (
mock_batch_job_succeeded
)

bq_client = bigquery.Client()
bq_client.delete_table(table_uri, not_found_ok=True)
response = batchpredict_embeddings_with_gcs.generate_content(
output_uri="gs://test-bucket/test-prefix"
)

mock_genai_client.assert_called_once_with(
http_options=types.HttpOptions(api_version="v1")
)
mock_genai_client.return_value.batches.create.assert_called_once()
mock_genai_client.return_value.batches.get.assert_called_once()
assert response == JobState.JOB_STATE_SUCCEEDED

@pytest.fixture(scope="session")
def gcs_output_uri() -> str:
prefix = f"text_output/{dt.now()}"

yield f"gs://{GCS_OUTPUT_BUCKET}/{prefix}"
@patch("google.genai.Client")
@patch("time.sleep", return_value=None)
def test_batch_prediction_with_bq(
mock_sleep: MagicMock, mock_genai_client: MagicMock
) -> None:
# Mock the API response
mock_batch_job_running = types.BatchJob(
name="test-batch-job", state="JOB_STATE_RUNNING"
)
mock_batch_job_succeeded = types.BatchJob(
name="test-batch-job", state="JOB_STATE_SUCCEEDED"
)

storage_client = storage.Client()
bucket = storage_client.get_bucket(GCS_OUTPUT_BUCKET)
blobs = bucket.list_blobs(prefix=prefix)
for blob in blobs:
blob.delete()
mock_genai_client.return_value.batches.create.return_value = (
mock_batch_job_running
)
mock_genai_client.return_value.batches.get.return_value = (
mock_batch_job_succeeded
)

response = batchpredict_with_bq.generate_content(
output_uri="bq://test-project.test_dataset.test_table"
)

def test_batch_prediction_embeddings_with_gcs(gcs_output_uri: str) -> None:
response = batchpredict_embeddings_with_gcs.generate_content(
output_uri=gcs_output_uri
mock_genai_client.assert_called_once_with(
http_options=types.HttpOptions(api_version="v1")
)
mock_genai_client.return_value.batches.create.assert_called_once()
mock_genai_client.return_value.batches.get.assert_called_once()
assert response == JobState.JOB_STATE_SUCCEEDED


def test_batch_prediction_with_bq(bq_output_uri: str) -> None:
response = batchpredict_with_bq.generate_content(output_uri=bq_output_uri)
assert response == JobState.JOB_STATE_SUCCEEDED
@patch("google.genai.Client")
@patch("time.sleep", return_value=None)
def test_batch_prediction_with_gcs(
mock_sleep: MagicMock, mock_genai_client: MagicMock
) -> None:
# Mock the API response
mock_batch_job_running = types.BatchJob(
name="test-batch-job", state="JOB_STATE_RUNNING"
)
mock_batch_job_succeeded = types.BatchJob(
name="test-batch-job", state="JOB_STATE_SUCCEEDED"
)

mock_genai_client.return_value.batches.create.return_value = (
mock_batch_job_running
)
mock_genai_client.return_value.batches.get.return_value = (
mock_batch_job_succeeded
)

response = batchpredict_with_gcs.generate_content(
output_uri="gs://test-bucket/test-prefix"
)

def test_batch_prediction_with_gcs(gcs_output_uri: str) -> None:
response = batchpredict_with_gcs.generate_content(output_uri=gcs_output_uri)
mock_genai_client.assert_called_once_with(
http_options=types.HttpOptions(api_version="v1")
)
mock_genai_client.return_value.batches.create.assert_called_once()
mock_genai_client.return_value.batches.get.assert_called_once()
assert response == JobState.JOB_STATE_SUCCEEDED


@patch("google.genai.Client")
def test_get_batch_job(mock_genai_client: MagicMock) -> None:
# Mock the API response
mock_batch_job = types.BatchJob(
name="test-batch-job",
state="JOB_STATE_PENDING"
)
mock_batch_job = types.BatchJob(name="test-batch-job", state="JOB_STATE_PENDING")

mock_genai_client.return_value.batches.get.return_value = mock_batch_job

response = get_batch_job.get_batch_job("test-batch-job")

mock_genai_client.assert_called_once_with(http_options=types.HttpOptions(api_version="v1"))
mock_genai_client.assert_called_once_with(
http_options=types.HttpOptions(api_version="v1")
)
mock_genai_client.return_value.batches.get.assert_called_once()
assert response == mock_batch_job
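One detail worth noting in these tests: stacked @patch decorators inject mocks bottom-up, so @patch("time.sleep", ...) (closest to the function) supplies the first parameter and @patch("google.genai.Client") the second. Patching "google.genai.Client" works because each sample builds its client with genai.Client(...) at call time, after the patch is active. A minimal, self-contained illustration of the ordering (the names and assertions are only for demonstration):

from unittest.mock import MagicMock, patch


@patch("google.genai.Client")             # outermost patch -> last argument
@patch("time.sleep", return_value=None)   # innermost patch -> first argument
def demo(mock_sleep: MagicMock, mock_client: MagicMock) -> None:
    # Each parameter matches its patch target: time.sleep first, genai.Client second.
    assert isinstance(mock_sleep, MagicMock)
    assert isinstance(mock_client, MagicMock)
    assert mock_sleep is not mock_client


demo()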
2 changes: 1 addition & 1 deletion genai/bounding_box/requirements.txt
@@ -1,2 +1,2 @@
google-genai==1.27.0
google-genai==1.42.0
pillow==11.1.0
4 changes: 2 additions & 2 deletions genai/content_cache/contentcache_create_with_txt_gcs_pdf.py
@@ -47,7 +47,7 @@ def create_content_cache() -> str:
contents=contents,
system_instruction=system_instruction,
# (Optional) For enhanced security, the content cache can be encrypted using a Cloud KMS key
# kms_key_name = "projects/.../locations/us-central1/keyRings/.../cryptoKeys/..."
# kms_key_name = "projects/.../locations/.../keyRings/.../cryptoKeys/..."
display_name="example-cache",
ttl="86400s",
),
@@ -56,7 +56,7 @@ def create_content_cache() -> str:
print(content_cache.name)
print(content_cache.usage_metadata)
# Example response:
# projects/111111111111/locations/us-central1/cachedContents/1111111111111111111
# projects/111111111111/locations/.../cachedContents/1111111111111111111
# CachedContentUsageMetadata(audio_duration_seconds=None, image_count=167,
# text_count=153, total_token_count=43130, video_duration_seconds=None)
# [END googlegenaisdk_contentcache_create_with_txt_gcs_pdf]
4 changes: 2 additions & 2 deletions genai/content_cache/contentcache_delete.py
@@ -19,11 +19,11 @@ def delete_context_caches(cache_name: str) -> str:

client = genai.Client()
# Delete content cache using name
# E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
# E.g cache_name = 'projects/111111111111/locations/.../cachedContents/1111111111111111111'
client.caches.delete(name=cache_name)
print("Deleted Cache", cache_name)
# Example response
# Deleted Cache projects/111111111111/locations/us-central1/cachedContents/1111111111111111111
# Deleted Cache projects/111111111111/locations/.../cachedContents/1111111111111111111
# [END googlegenaisdk_contentcache_delete]
return cache_name

4 changes: 2 additions & 2 deletions genai/content_cache/contentcache_list.py
@@ -29,8 +29,8 @@ def list_context_caches() -> str:
print(f"Expires at: {content_cache.expire_time}")

# Example response:
# * Cache `projects/111111111111/locations/us-central1/cachedContents/1111111111111111111` for
# model `projects/111111111111/locations/us-central1/publishers/google/models/gemini-XXX-pro-XXX`
# * Cache `projects/111111111111/locations/.../cachedContents/1111111111111111111` for
# model `projects/111111111111/locations/.../publishers/google/models/gemini-XXX-pro-XXX`
# * Last updated at: 2025-02-13 14:46:42.620490+00:00
# * CachedContentUsageMetadata(audio_duration_seconds=None, image_count=167, text_count=153, total_token_count=43130, video_duration_seconds=None)
# ...
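The hunk above only shows the print statements inside the listing loop; a minimal sketch of the surrounding loop, assuming client.caches.list() and the attribute names implied by the sample output:

from google import genai
from google.genai.types import HttpOptions

client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Iterate over the project's content caches and print their metadata.
for content_cache in client.caches.list():
    print(f"* Cache `{content_cache.name}` for model `{content_cache.model}`")
    print(f"* Last updated at: {content_cache.update_time}")
    print(f"* Expires at: {content_cache.expire_time}")
    print(f"* {content_cache.usage_metadata}")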
2 changes: 1 addition & 1 deletion genai/content_cache/contentcache_update.py
@@ -25,7 +25,7 @@ def update_content_cache(cache_name: str) -> str:
client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Get content cache by name
# cache_name = "projects/111111111111/locations/us-central1/cachedContents/1111111111111111111"
# cache_name = "projects/.../locations/.../cachedContents/1111111111111111111"
content_cache = client.caches.get(name=cache_name)
print("Expire time", content_cache.expire_time)
# Example response
2 changes: 1 addition & 1 deletion genai/content_cache/contentcache_use_with_txt.py
@@ -20,7 +20,7 @@ def generate_content(cache_name: str) -> str:

client = genai.Client(http_options=HttpOptions(api_version="v1"))
# Use content cache to generate text response
# E.g cache_name = 'projects/111111111111/locations/us-central1/cachedContents/1111111111111111111'
# E.g cache_name = 'projects/.../locations/.../cachedContents/1111111111111111111'
response = client.models.generate_content(
model="gemini-2.5-flash",
contents="Summarize the pdfs",
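The hunk above is cut off before the config argument, so the cache attachment is not visible; a hedged sketch of how the call presumably finishes, assuming GenerateContentConfig.cached_content takes the cache resource name:

from google import genai
from google.genai.types import GenerateContentConfig, HttpOptions

client = genai.Client(http_options=HttpOptions(api_version="v1"))

# Placeholder cache resource name, e.g. "projects/.../locations/.../cachedContents/..."
cache_name = "projects/111111111111/locations/us-central1/cachedContents/1111111111111111111"

response = client.models.generate_content(
    model="gemini-2.5-flash",
    contents="Summarize the pdfs",
    config=GenerateContentConfig(cached_content=cache_name),
)
print(response.text)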
2 changes: 1 addition & 1 deletion genai/content_cache/requirements.txt
@@ -1 +1 @@
google-genai==1.27.0
google-genai==1.42.0
2 changes: 1 addition & 1 deletion genai/controlled_generation/requirements.txt
@@ -1 +1 @@
google-genai==1.27.0
google-genai==1.42.0
2 changes: 1 addition & 1 deletion genai/count_tokens/requirements.txt
@@ -1 +1 @@
google-genai==1.27.0
google-genai==1.42.0
2 changes: 1 addition & 1 deletion genai/embeddings/requirements.txt
@@ -1 +1 @@
google-genai==1.27.0
google-genai==1.42.0
2 changes: 1 addition & 1 deletion genai/express_mode/requirements.txt
@@ -1 +1 @@
google-genai==1.27.0
google-genai==1.42.0
2 changes: 1 addition & 1 deletion genai/image_generation/requirements.txt
@@ -1,2 +1,2 @@
google-genai==1.29.0
google-genai==1.42.0
pillow==11.1.0
2 changes: 1 addition & 1 deletion genai/live/live_audiogen_with_txt.py
@@ -82,7 +82,7 @@ async def generate_content() -> None:
# Received audio answer. Saving to local file...
# Audio saved to gemini_response.wav
# [END googlegenaisdk_live_audiogen_with_txt]
return None
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_code_exec_with_txt.py
@@ -55,7 +55,7 @@ async def generate_content() -> list[str]:
# > Compute the largest prime palindrome under 10
# Final Answer: The final answer is $\boxed{7}$
# [END googlegenaisdk_live_code_exec_with_txt]
return response
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_func_call_with_txt.py
@@ -67,7 +67,7 @@ async def generate_content() -> list[FunctionResponse]:
# > Turn on the lights please
# ok
# [END googlegenaisdk_live_func_call_with_txt]
return function_responses
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_ground_googsearch_with_txt.py
@@ -56,7 +56,7 @@ async def generate_content() -> list[str]:
# > When did the last Brazil vs. Argentina soccer match happen?
# The last Brazil vs. Argentina soccer match was on March 25, 2025, a 2026 World Cup qualifier, where Argentina defeated Brazil 4-1.
# [END googlegenaisdk_live_ground_googsearch_with_txt]
return response
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_structured_ouput_with_txt.py
@@ -79,7 +79,7 @@ def generate_content() -> CalendarEvent:
# User message: Alice and Bob are going to a science fair on Friday.
# Output message: name='science fair' date='Friday' participants=['Alice', 'Bob']
# [END googlegenaisdk_live_structured_ouput_with_txt]
return response
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_transcribe_with_audio.py
@@ -60,7 +60,7 @@ async def generate_content() -> list[str]:
# > Hello? Gemini are you there?
# Yes, I'm here. What would you like to talk about?
# [END googlegenaisdk_live_transcribe_with_audio]
return response
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_txtgen_with_audio.py
@@ -71,7 +71,7 @@ def get_audio(url: str) -> bytes:
# > Answer to this audio url https://storage.googleapis.com/generativeai-downloads/data/16000.wav
# Yes, I can hear you. How can I help you today?
# [END googlegenaisdk_live_txtgen_with_audio]
return response
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_websocket_audiogen_with_txt.py
@@ -143,7 +143,7 @@ async def generate_content() -> str:
# Input: Hello? Gemini are you there?
# Audio Response: Hello there. I'm here. What can I do for you today?
# [END googlegenaisdk_live_audiogen_websocket_with_txt]
return "output.wav"
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_websocket_audiotranscript_with_txt.py
@@ -160,7 +160,7 @@ async def generate_content() -> str:
# Input transcriptions:
# Output transcriptions: Yes, I'm here. How can I help you today?
# [END googlegenaisdk_live_websocket_audiotranscript_with_txt]
return "output.wav"
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_websocket_textgen_with_audio.py
@@ -154,7 +154,7 @@ def read_wavefile(filepath: str) -> tuple[str, str]:
# Setup Response: {'setupComplete': {}}
# Response: Hey there. What's on your mind today?
# [END googlegenaisdk_live_websocket_textgen_with_audio]
return final_response_text
return True


if __name__ == "__main__":
2 changes: 1 addition & 1 deletion genai/live/live_websocket_textgen_with_txt.py
@@ -130,7 +130,7 @@ async def generate_content() -> str:
# Input: Hello? Gemini are you there?
# Response: Hello there. I'm here. What can I do for you today?
# [END googlegenaisdk_live_websocket_with_txt]
return final_response_text
return True


if __name__ == "__main__":