37 changes: 37 additions & 0 deletions test-google-genai/README.md
@@ -0,0 +1,37 @@
# Google GenAI / Vertex AI Test

This project demonstrates how to use Google's GenAI API with Vertex AI authentication.

## Prerequisites

1. Python 3.12+ (to match `requires-python` in `pyproject.toml`)
2. A Google Cloud project with the Vertex AI API enabled (Sentry already has one)
3. Authentication credentials (see setup below)

## Authentication Setup

The shared 1Password vault contains a secret named `Vertex API Key JSON`, which holds the JSON credentials for the service account that has been set up.

All scripts in this directory authenticate to the Vertex APIs with that service account; there is no API key.

To set this up, copy the contents of the secret into a JSON file and store the path to that file in the `GOOGLE_APPLICATION_CREDENTIALS` environment variable.

Ask on Slack for the values for `GOOGLE_VERTEX_LOCATION` and `GOOGLE_VERTEX_PROJECT` env variables.
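
If you want to confirm that the credentials are picked up before running the scripts, a minimal sanity check with `google-auth` (already listed as a dependency in `pyproject.toml`) looks roughly like this:

```python
# Rough sketch: verify that Application Default Credentials resolve from
# GOOGLE_APPLICATION_CREDENTIALS before running the example scripts.
import google.auth

credentials, project_id = google.auth.default()
print("Loaded credentials for project:", project_id)
```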

## Configure

Set the following environment variables in a `.env` file (a sample is sketched below the list):

- `SENTRY_DSN`
- `GOOGLE_APPLICATION_CREDENTIALS`
- `GOOGLE_VERTEX_LOCATION`
- `GOOGLE_VERTEX_PROJECT`
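
For reference, the `.env` file would look roughly like this (all values below are placeholders):

```
SENTRY_DSN=<your Sentry DSN>
GOOGLE_APPLICATION_CREDENTIALS=/path/to/vertex-credentials.json
GOOGLE_VERTEX_LOCATION=<region, e.g. us-central1>
GOOGLE_VERTEX_PROJECT=<GCP project id>
```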

## Example scripts

Documentation is available in the README of https://github.com/googleapis/python-genai and at https://googleapis.github.io/python-genai/.

- `client.py` - runs the basic example from the documentation
- `client_content_stream.py` - runs the content stream example from the documentation
- `client_async.py` - runs the async example from the documentation
- `chats.py` - runs the chats example from the documentation
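
Each script loads the `.env` file via `python-dotenv`, so once the environment variables above are set they can be run directly (for example `python client.py`, or `uv run client.py` if you use uv).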
39 changes: 39 additions & 0 deletions test-google-genai/chats.py
@@ -0,0 +1,39 @@
from dotenv import load_dotenv
from google import genai
from google.genai.types import HttpOptions
import os

import sentry_sdk
from sentry_sdk.integrations.google_genai import GoogleGenAIIntegration

load_dotenv()

sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],
    # Add data like request headers and IP for users,
    # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
    send_default_pii=True,
    traces_sample_rate=1.0,
    integrations=[GoogleGenAIIntegration()],
    debug=True,
)


client = genai.Client(
    vertexai=True,
    project=os.environ["GOOGLE_VERTEX_PROJECT"],
    location=os.environ["GOOGLE_VERTEX_LOCATION"],
    http_options=HttpOptions(api_version="v1"),
)


def main():
    with sentry_sdk.start_transaction(op="test-transaction", name="test-chats"):
        chat = client.chats.create(model="gemini-2.0-flash")
        response = chat.send_message("What is the weather like in San Francisco, CA?")

        print(response.text)


if __name__ == "__main__":
    main()
59 changes: 59 additions & 0 deletions test-google-genai/client.py
@@ -0,0 +1,59 @@
from dotenv import load_dotenv

from google import genai
from google.genai import types
from google.genai.types import HttpOptions
import os

import sentry_sdk
from sentry_sdk.integrations.google_genai import GoogleGenAIIntegration

load_dotenv()

sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],
    # Add data like request headers and IP for users,
    # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
    send_default_pii=True,
    traces_sample_rate=1.0,
    integrations=[GoogleGenAIIntegration()],
    debug=True,
)

client = genai.Client(
    vertexai=True,
    project=os.environ["GOOGLE_VERTEX_PROJECT"],
    location=os.environ["GOOGLE_VERTEX_LOCATION"],
    http_options=HttpOptions(api_version="v1"),
)


def get_current_weather(location: str) -> str:
"""Returns the current weather.

Args:
location: The city and state, e.g. San Francisco, CA
"""
return "sunny"


google_search_retrieval_tool = {"google_search": {}}


def main():
    with sentry_sdk.start_transaction(op="test-transaction", name="test"):
        response = client.models.generate_content(
            model="gemini-2.5-flash",
            contents="What is the weather like in Boston, MA?",
            config=types.GenerateContentConfig(
                tools=[get_current_weather],
                system_instruction="You are a helpful assistant that can use tools to help answer questions.",
                temperature=0.2,
            ),
        )

        print(response.text)


if __name__ == "__main__":
    main()
55 changes: 55 additions & 0 deletions test-google-genai/client_async.py
@@ -0,0 +1,55 @@
from dotenv import load_dotenv
import asyncio
from google import genai
from google.genai import types
from google.genai.types import HttpOptions
import os

import sentry_sdk
from sentry_sdk.integrations.google_genai import GoogleGenAIIntegration

load_dotenv()

sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],
    # Add data like request headers and IP for users,
    # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
    send_default_pii=True,
    traces_sample_rate=1.0,
    integrations=[GoogleGenAIIntegration()],
    debug=True,
)

aclient = genai.Client(
    vertexai=True,
    project=os.environ["GOOGLE_VERTEX_PROJECT"],
    location=os.environ["GOOGLE_VERTEX_LOCATION"],
    http_options=HttpOptions(api_version="v1"),
).aio


def get_current_weather(location: str) -> str:
"""Returns the current weather.

Args:
location: The city and state, e.g. San Francisco, CA
"""
return "sunny"


async def main():
    with sentry_sdk.start_transaction(op="async-test-transaction", name="async-test"):
        response = await aclient.models.generate_content(
            model="gemini-2.5-flash",
            contents="What is the weather like in Boston, MA?",
            config=types.GenerateContentConfig(
                tools=[get_current_weather],
                system_instruction="You are a helpful assistant that can use tools to help answer questions.",
                temperature=0.2,
            ),
        )

        print(response.text)


asyncio.run(main())
52 changes: 52 additions & 0 deletions test-google-genai/client_content_stream.py
@@ -0,0 +1,52 @@
from dotenv import load_dotenv
from google import genai
from google.genai.types import HttpOptions
import os

import sentry_sdk
from sentry_sdk.integrations.google_genai import GoogleGenAIIntegration

load_dotenv()

sentry_sdk.init(
    dsn=os.environ["SENTRY_DSN"],
    # Add data like request headers and IP for users,
    # see https://docs.sentry.io/platforms/python/data-management/data-collected/ for more info
    send_default_pii=True,
    traces_sample_rate=1.0,
    integrations=[GoogleGenAIIntegration()],
    debug=True,
)


client = genai.Client(
    vertexai=True,
    project=os.environ["GOOGLE_VERTEX_PROJECT"],
    location=os.environ["GOOGLE_VERTEX_LOCATION"],
    http_options=HttpOptions(api_version="v1"),
)


def get_current_weather(location: str) -> str:
"""Returns the current weather.

Args:
location: The city and state, e.g. San Francisco, CA
"""
return "sunny"


google_search_retrieval_tool = {"google_search": {}}


def main():
    with sentry_sdk.start_transaction(op="test-transaction", name="test-streaming"):
        for chunk in client.models.generate_content_stream(
            model="gemini-2.5-flash",
            contents="Why is the sky blue?",
        ):
            print(chunk.text, end="")


if __name__ == "__main__":
    main()
17 changes: 17 additions & 0 deletions test-google-genai/pyproject.toml
@@ -0,0 +1,17 @@
[project]
name = "test-google-genai"
version = "0.1.0"
description = "Google GenAI / Vertex AI API testing with proper authentication"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"google-genai",
"google-auth",
"google-cloud-aiplatform",
"sentry-sdk",
"pip>=25.2",
"python-dotenv>=1.1.1",
]

[tool.uv.sources]
sentry-sdk = { path = "../../sentry-python", editable = true }