diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
new file mode 100644
index 00000000000..5fdcb436f99
--- /dev/null
+++ b/.github/workflows/stale.yml
@@ -0,0 +1,33 @@
+# This workflow warns and then closes issues that have had no activity for a specified amount of time.
+name: Mark and close stale issues
+
+on:
+ schedule:
+ # Scheduled to run at 1:30 UTC every day
+ - cron: '30 1 * * *'
+
+jobs:
+ stale:
+
+ runs-on: ubuntu-latest
+ permissions:
+ issues: write
+
+ steps:
+ - uses: actions/stale@v5
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+ days-before-issue-stale: 7
+ days-before-issue-close: 2
+ stale-issue-label: "status:stale"
+ close-issue-reason: not_planned
+ any-of-labels: "status:awaiting user response"
+ remove-stale-when-updated: true
+ labels-to-remove-when-unstale: 'status:awaiting user response,status:stale'
+ stale-issue-message: >
+ This issue has been marked as stale because it has been open for 7 days with no activity. It will be closed in 2 days if no further activity occurs.
+ close-issue-message: >
+ This issue was closed because it has been inactive for 9 days.
+ Please post a new issue if you need further assistance. Thanks!
+ # Label that can be assigned to issues to exclude them from being marked as stale
+ exempt-issue-labels: 'override-stale'
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index 72e3eec72d4..d2fd993b39c 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -14,7 +14,8 @@ jobs:
strategy:
matrix:
java: [8, 11, 17, 21]
- testgroup: ['**/*']
+ # Only run unit tests.
+ testgroup: ["**/*Test"]
fail-fast: false
name: unit-test (${{matrix.java}})
steps:
@@ -25,4 +26,4 @@ jobs:
java-version: ${{matrix.java}}
cache: 'maven'
- name: Java Unit Tests
- run: mvn clean test -Dtest=${{matrix.testgroup}}
\ No newline at end of file
+ run: mvn clean test -Dtest=${{matrix.testgroup}} -Djacoco.skip=true
\ No newline at end of file
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index eb4e0dba726..0c0c0c35760 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "1.10.0"
+ ".": "1.25.0"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a770a7362d1..1a5d8a27fcb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,213 @@
# Changelog
+## [1.25.0](https://github.com/googleapis/java-genai/compare/v1.24.0...v1.25.0) (2025-10-29)
+
+
+### Features
+
+* Add safety_filter_level and person_generation for Imagen upscaling ([09a8075](https://github.com/googleapis/java-genai/commit/09a80754b202fdf903039341f5266f62d9b879cb))
+* Add support for preference optimization tuning in the SDK. ([5d4123c](https://github.com/googleapis/java-genai/commit/5d4123c0391d443e94bb1e81524ccae8779462d7))
+* Added Operations.get which is a generic method which will handle all Operation types. ([c1dc32f](https://github.com/googleapis/java-genai/commit/c1dc32f84d0e4d14a16345dcb404c8b2bef05338))
+* Pass file name to the backend when uploading with a file path ([081a9a6](https://github.com/googleapis/java-genai/commit/081a9a6a67d1ba542edb1d1330dfa56579204a43))
+* support default global location when not using api key with vertexai backend ([f9028a7](https://github.com/googleapis/java-genai/commit/f9028a71d4e736a8dc97daa54e6e4275b5016abd))
+* Support retries in API requests ([3d5de00](https://github.com/googleapis/java-genai/commit/3d5de000277eb0da172d6b19795c6f2d4b88c213))
+
+
+### Documentation
+
+* Add docstring for classes and fields that are not supported in Gemini or Vertex API ([7a03dac](https://github.com/googleapis/java-genai/commit/7a03dac0a4e3388f98be199765794fcf511bfe83))
+* Add docstring for enum classes that are not supported in Gemini or Vertex API ([830a12f](https://github.com/googleapis/java-genai/commit/830a12f3dcbb8beb1dd5ff3ff82f6b19ebb2af93))
+* Add documentation for the retry behavior ([4fbcf51](https://github.com/googleapis/java-genai/commit/4fbcf514321fdc2cbee1393fc6babe33fd0e5e74))
+
+## [1.24.0](https://github.com/googleapis/java-genai/compare/v1.23.0...v1.24.0) (2025-10-22)
+
+
+### Features
+
+* Add enable_enhanced_civic_answers in GenerationConfig ([684a2c5](https://github.com/googleapis/java-genai/commit/684a2c5b582fa4ca13cb9cfe819ef759778101b0))
+* support createEmbeddings in Batches.java ([8947f6f](https://github.com/googleapis/java-genai/commit/8947f6fc20fbdd90a7d17071dee1bd2e5bea0c3e))
+* support jailbreak in HarmCategory and BlockedReason ([3dab40b](https://github.com/googleapis/java-genai/commit/3dab40bc367168ed48d8d1acfb278f5bc6edb83f))
+
+
+### Bug Fixes
+
+* Make async methods in Batches module truly non-blocking ([f2ae75a](https://github.com/googleapis/java-genai/commit/f2ae75ac364702f483c376e458a120d1ffa93b17))
+* Make async methods in Caches, Tuning, and Operations modules truly non-blocking ([db56239](https://github.com/googleapis/java-genai/commit/db56239bbebbfe3cb95e00d2d3eac253b76f22fe))
+* Make async methods in Models module truly non-blocking ([c205d01](https://github.com/googleapis/java-genai/commit/c205d0172ca40e01f7d8de17a3bc9d38eeb5fc21))
+
+## [1.23.0](https://github.com/googleapis/java-genai/compare/v1.22.0...v1.23.0) (2025-10-15)
+
+
+### Features
+
+* Support video extension for Veo on Gemini Developer API ([b398509](https://github.com/googleapis/java-genai/commit/b398509697a3e9aa27bad5e804382c5a4db333ab))
+
+## [1.22.0](https://github.com/googleapis/java-genai/compare/v1.21.0...v1.22.0) (2025-10-10)
+
+
+### Features
+
+* Enable Google Maps tool for Genai. ([a4baf3c](https://github.com/googleapis/java-genai/commit/a4baf3c610ddcb1ed36c1501fcb2248b5a6bd610))
+* Support enableWidget feature in GoogleMaps ([aefbd5c](https://github.com/googleapis/java-genai/commit/aefbd5c1519f453cd2fe158a2765c195a9454322))
+* Support Gemini batch inline request's metadata and add test coverage to safety setting ([17033b3](https://github.com/googleapis/java-genai/commit/17033b38a93d6952b29699f5a4c79ed9dd862976))
+
+## [1.21.0](https://github.com/googleapis/java-genai/compare/v1.20.0...v1.21.0) (2025-10-08)
+
+
+### Features
+
+* Add `NO_IMAGE` enum value to `FinishReason` ([6b00c0b](https://github.com/googleapis/java-genai/commit/6b00c0b7dc8c85fcefc5aac643c3588048317614))
+* Add labels field to Imagen configs ([e69cf68](https://github.com/googleapis/java-genai/commit/e69cf68583ca581f1a7fad89b04292036433cdb4))
+* Add thinking_config for live ([274c21d](https://github.com/googleapis/java-genai/commit/274c21d34310e630b9b4ad296b4c8314a4249d0c))
+* Add utility methods for creating `FunctionResponsePart` and creating FunctionResponse `Part` with `FunctionResponseParts` ([af16a4c](https://github.com/googleapis/java-genai/commit/af16a4c994e0cc4e6fbc2cdbda825246df9aa253))
+* Enable Ingredients to Video and Advanced Controls for Veo on Gemini Developer API (Early Access Program) ([4c42e65](https://github.com/googleapis/java-genai/commit/4c42e6527a7fe43c0b534e381d65b5d9650e8709))
+
+
+### Bug Fixes
+
+* Ensure Live server message are properly converted ([206dc88](https://github.com/googleapis/java-genai/commit/206dc88e3b220a875f784a507fc9470bc411de36))
+
+## [1.20.0](https://github.com/googleapis/java-genai/compare/v1.19.0...v1.20.0) (2025-10-01)
+
+
+### Features
+
+* Add `ImageConfig` to `GenerateContentConfig` ([6fb5eba](https://github.com/googleapis/java-genai/commit/6fb5eba0e916ada8f300dd5ad333f269e9044ea3))
+
+## [1.19.0](https://github.com/googleapis/java-genai/compare/v1.18.0...v1.19.0) (2025-09-30)
+
+
+### Features
+
+* expose session id in Live API ([b6d5389](https://github.com/googleapis/java-genai/commit/b6d5389899bd1443d5c508776dfe5909eb1d7400))
+* rename ComputerUse tool (early access) ([4bbba2b](https://github.com/googleapis/java-genai/commit/4bbba2b53eedec0b28a5d98d7fc193683c565f50))
+
+## [1.18.0](https://github.com/googleapis/java-genai/compare/v1.17.0...v1.18.0) (2025-09-25)
+
+
+### Features
+
+* Add FunctionResponsePart & ToolComputerUse.excludedPredefinedFunctions ([1a24bed](https://github.com/googleapis/java-genai/commit/1a24bedc752851236b0a7239a7dba7090e4ac4e8))
+* Support Imagen 4 Ingredients on Vertex ([b5eed8d](https://github.com/googleapis/java-genai/commit/b5eed8d1323a3d37b53c1d8c5c5557392ce7ed44))
+
+
+### Bug Fixes
+
+* Expose `JOB_STATE_RUNNING` and `JOB_STATE_EXPIRED` for Gemini Batches states ([c5b4fdf](https://github.com/googleapis/java-genai/commit/c5b4fdf58b9d0d74efdd2c7e740bed8b6b661c99))
+* initialization of `pre_tuned_model_checkpoint_id` from tuning config. ([c293633](https://github.com/googleapis/java-genai/commit/c293633a8fe298668f030ba3b257347a8fd0eedf))
+* Make async generateContent and generateContentStream truly non-blocking ([5cb18fd](https://github.com/googleapis/java-genai/commit/5cb18fd4f07f9b1f21efb82fe961e473325f6257))
+* only run unit tests in github action ([9b2861b](https://github.com/googleapis/java-genai/commit/9b2861bb79d50c10c152aa010bedf0bc48a04ad8))
+
+## [1.17.0](https://github.com/googleapis/java-genai/compare/v1.16.0...v1.17.0) (2025-09-16)
+
+
+### Features
+
+* Add 'turn_complete_reason' and 'waiting_for_input' fields. ([5bc4873](https://github.com/googleapis/java-genai/commit/5bc48732fd9281162942b158de34173343d7b179))
+* Add `VideoGenerationMaskMode` enum for Veo 2 Editing ([e5c8277](https://github.com/googleapis/java-genai/commit/e5c82778586dfee4ed7d04a9eabb2a4d8eac6185))
+* Add labels to create tuning job config ([695e17a](https://github.com/googleapis/java-genai/commit/695e17a7b1adebbccb1651d30b768d27f81c3977))
+* generate the function_call class's converters ([38703c7](https://github.com/googleapis/java-genai/commit/38703c726606cbe1b6f5f5f4eb809310b0df94a8))
+* java local tokenizer ([d774185](https://github.com/googleapis/java-genai/commit/d7741856cafd3b8e05803f7b452335fbc4ce8977))
+* Support Veo 2 Editing on Vertex ([d401d3c](https://github.com/googleapis/java-genai/commit/d401d3cf6a5f9ef3d2a76a548eed9d218169170e))
+
+
+### Bug Fixes
+
+* Enable `id` field in `FunctionCall` for Vertex AI. ([3773fe7](https://github.com/googleapis/java-genai/commit/3773fe75007b9ce83692de0031853f0f607bff3e))
+* update Live API audio example with better interruption handling ([cad8df9](https://github.com/googleapis/java-genai/commit/cad8df9c4edaf0806a641869fef6379ed05f0189))
+
+## [1.16.0](https://github.com/googleapis/java-genai/compare/v1.15.0...v1.16.0) (2025-09-02)
+
+
+### Features
+
+* Add resolution field for Gemini Developer API Veo 3 generation ([eec410c](https://github.com/googleapis/java-genai/commit/eec410c5b68de471e9a824e61f0efb819841dfe6))
+* add the response body for generateContent ([a011580](https://github.com/googleapis/java-genai/commit/a0115804e438bac120d5155c91ece53c79ada677))
+
+
+### Documentation
+
+* Refactor/update docstrings for Imagen and Veo ([2470101](https://github.com/googleapis/java-genai/commit/24701018feb91d147bf1817b04752e2595bf40ab))
+
+## [1.15.0](https://github.com/googleapis/java-genai/compare/v1.14.0...v1.15.0) (2025-08-27)
+
+
+### Features
+
+* add `sdkHttpResponse.headers` to *Delete responses. ([4be038d](https://github.com/googleapis/java-genai/commit/4be038de86c782d103d21258db51055f35e5af21))
+* Add output_gcs_uri to Imagen upscale_image ([7649467](https://github.com/googleapis/java-genai/commit/76494678d3937229778c5063b4f4ff340f977bba))
+* add the response body for generateContent ([6e28ab4](https://github.com/googleapis/java-genai/commit/6e28ab4236565be61fb11e79ca9f2f31a2013598))
+* add the response body for generateContent ([b2a5b3f](https://github.com/googleapis/java-genai/commit/b2a5b3f5a6ef7a8bb4d011980d90ffdc3c745603))
+* Add VALIDATED mode into FunctionCallingConfigMode ([4bb8680](https://github.com/googleapis/java-genai/commit/4bb868046199d3249f75ede213ef7d77e0b7783f))
+* Add VideoGenerationReferenceType enum for generate_videos ([df9d910](https://github.com/googleapis/java-genai/commit/df9d910537ec7de6188f777801b4d50e84cd91e7))
+* Support GenerateVideosSource for Veo GenerateVideos ([c26af63](https://github.com/googleapis/java-genai/commit/c26af6396002cf21c0ed272290d44b09b6a41840))
+* support tunings.cancel in the genai SDK for Python, Java, JS, and Go ([9982251](https://github.com/googleapis/java-genai/commit/9982251d2dd80d3151aefb4462d9e4864d8e064e))
+
+
+### Documentation
+
+* Refactor model IDs into a Constants class ([dacd787](https://github.com/googleapis/java-genai/commit/dacd7875d41f810e50f2655e5d0e62f031197e61))
+
+## [1.14.0](https://github.com/googleapis/java-genai/compare/v1.13.0...v1.14.0) (2025-08-22)
+
+
+### Features
+
+* Add add_watermark field for recontext_image (Virtual Try-On, Product Recontext) ([5aacbc0](https://github.com/googleapis/java-genai/commit/5aacbc06435fb36fffde0c3641b3077493f13577))
+
+
+### Bug Fixes
+
+* Fix the bug that files.create doesn't return the upload URL correctly ([eb40c5f](https://github.com/googleapis/java-genai/commit/eb40c5f7f255b46a7a820da044e210127c7aac18))
+
+
+### Documentation
+
+* update TokensInfo docstring ([48eba7f](https://github.com/googleapis/java-genai/commit/48eba7fcb369537ca4266ec61107e016f7c242ed))
+
+## [1.13.0](https://github.com/googleapis/java-genai/compare/v1.12.0...v1.13.0) (2025-08-18)
+
+
+### Features
+
+* expose JsonSerializable.stringToJsonNode to help user better use *JsonSchema fields. ([35d783b](https://github.com/googleapis/java-genai/commit/35d783b5d1655b6f0d52afefa633c608f39d4e01))
+* Return response headers for all methods (except streaming methods) ([7e8b71b](https://github.com/googleapis/java-genai/commit/7e8b71b0769362a728e2bf9b93738563113a4edc))
+* Support Imagen image segmentation on Vertex ([e2a561b](https://github.com/googleapis/java-genai/commit/e2a561b11b53f3a7cc30aacb4a0dcf6a26e01645))
+* Support Veo 2 Reference Images to Video Generation on Vertex ([2f5580f](https://github.com/googleapis/java-genai/commit/2f5580fd1e78d6e8e4f371f291dacf98c7c617ef))
+
+## [1.12.0](https://github.com/googleapis/java-genai/compare/v1.11.0...v1.12.0) (2025-08-13)
+
+
+### Features
+
+* enable continuous fine-tuning on a pre-tuned model in the SDK. ([e49d350](https://github.com/googleapis/java-genai/commit/e49d3509355f717d391a88b6ff1a6f4f6d83fddc))
+* support document name in grounding metadata ([8273922](https://github.com/googleapis/java-genai/commit/8273922ebfbce4ffafa8993bcc6928b47b5ff821))
+* Support exclude_domains in Google Search and Enterprise Web Search ([e975d28](https://github.com/googleapis/java-genai/commit/e975d284f78e0c9a3cd2199d304b4739bad36fe1))
+
+## [1.11.0](https://github.com/googleapis/java-genai/compare/v1.10.0...v1.11.0) (2025-08-06)
+
+
+### Features
+
+* Add image_size field for Gemini Developer API Imagen 4 generation ([c50c755](https://github.com/googleapis/java-genai/commit/c50c755c08efbed5a62e1006890b1d0bd9956702))
+* enable responseId for Gemini Developer API ([4912ff4](https://github.com/googleapis/java-genai/commit/4912ff421d6d3bc40edd70a939f71f5f33f58597))
+* support extraBody in HttpOptions class ([036bac8](https://github.com/googleapis/java-genai/commit/036bac89fda15022ec4d9c5c73ba81ad0a6cc9be))
+* Support image recontext on Vertex ([e7de8c8](https://github.com/googleapis/java-genai/commit/e7de8c83bbd2e7e37c2198c3501e2d5bee58c0a2))
+* Support new enum types for UrlRetrievalStatus ([cb27222](https://github.com/googleapis/java-genai/commit/cb27222a7f7cdf442a7d6b61496709f7cf084a91))
+* support response headers in Go for all methods. ([222b41e](https://github.com/googleapis/java-genai/commit/222b41e196afc13775cc22292a58567d7b4859fa))
+
+
+### Bug Fixes
+
+* Remove duplicate JavaTimeModule in JsonSerializable ([a7dbd4c](https://github.com/googleapis/java-genai/commit/a7dbd4c527456f20aa5d154bde14f74f6e66d174))
+
+
+### Documentation
+
+* Add Imagen and Veo to README ([cc0a0aa](https://github.com/googleapis/java-genai/commit/cc0a0aa28cae618acf617ab92819df78d80afea5))
+* Add latest models features in README ([a2eccaf](https://github.com/googleapis/java-genai/commit/a2eccafae5c6c9b82341a148b572bf9bc80f241b))
+* mark Client as thread safe and Chat as not thread safe ([be3e50e](https://github.com/googleapis/java-genai/commit/be3e50e4217780329c0636fd7f8a1b743e7f9597))
+
## [1.10.0](https://github.com/googleapis/java-genai/compare/v1.9.0...v1.10.0) (2025-07-23)
diff --git a/README.md b/README.md
index dbaa7e1a39a..b69e93492d3 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ If you're using Maven, add the following to your dependencies:
 <dependency>
   <groupId>com.google.genai</groupId>
   <artifactId>google-genai</artifactId>
-  <version>1.10.0</version>
+  <version>1.25.0</version>
```
@@ -161,6 +161,30 @@ per-request basis, providing maximum flexibility for diverse API call settings.
See [this example](https://github.com/googleapis/java-genai/blob/main/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java)
for more details.
+### HttpRetryOptions
+
+[HttpRetryOptions](https://github.com/googleapis/java-genai/blob/main/src/main/java/com/google/genai/types/HttpRetryOptions.java)
+allows you to configure the automatic retry behavior for failed API calls. You
+can customize key settings like:
+
+ * Total number of attempts.
+ * Which HTTP status codes should trigger a retry (e.g., 429 for rate limits).
+ * Backoff strategy, including the initial delay and maximum delay between retries.
+
+```java
+HttpOptions httpOptions = HttpOptions.builder()
+ .retryOptions(
+ HttpRetryOptions.builder()
+ .attempts(3)
+ .httpStatusCodes(408, 429))
+ .build();
+```
+
+Since HttpRetryOptions is part of HttpOptions, it supports being set at the
+client level (as shown) or on a per-request basis. Note that providing
+`HttpRetryOptions` for a specific request will completely override any default
+retry settings configured on the client.
+
### ClientOptions
[ClientOptions](https://github.com/googleapis/java-genai/blob/main/src/main/java/com/google/genai/types/ClientOptions.java)
enables you to customize the behavior of the HTTP client. It currently supports
@@ -178,11 +202,11 @@ Client client = Client.builder()
```
### Interact with models
-The Gen AI Java SDK allows you to access the service programmatically.
+The Google Gen AI Java SDK allows you to access the service programmatically.
The following code snippets are some basic usages of model inferencing.
#### Generate Content
-Use `generateContent` method for the most basic text generation.
+Use `generateContent` method for the most basic content generation.
##### with text input
@@ -199,10 +223,17 @@ public class GenerateContentWithTextInput {
Client client = new Client();
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "What is your name?", null);
+ client.models.generateContent("gemini-2.5-flash", "What is your name?", null);
// Gets the text string from the response by the quick accessor method `text()`.
System.out.println("Unary response: " + response.text());
+
+ // Gets the http headers from the response.
+ response
+ .sdkHttpResponse()
+ .ifPresent(
+ httpResponse ->
+ System.out.println("Response headers: " + httpResponse.headers().orElse(null)));
}
}
```
@@ -232,7 +263,68 @@ public class GenerateContentWithImageInput {
Part.fromUri("gs://path/to/image.jpg", "image/jpeg"));
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", content, null);
+ client.models.generateContent("gemini-2.5-flash", content, null);
+
+ System.out.println("Response: " + response.text());
+ }
+}
+```
+
+##### Generate Content with extra configs
+To set configurations like System Instructions and Safety Settings, you can pass
+a `GenerateContentConfig` to the `GenerateContent` method.
+
+```java
+package <your package name>;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.Client;
+import com.google.genai.types.Content;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.GoogleSearch;
+import com.google.genai.types.HarmBlockThreshold;
+import com.google.genai.types.HarmCategory;
+import com.google.genai.types.Part;
+import com.google.genai.types.SafetySetting;
+import com.google.genai.types.ThinkingConfig;
+import com.google.genai.types.Tool;
+
+public class GenerateContentWithConfigs {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ // Sets the safety settings in the config.
+ ImmutableList<SafetySetting> safetySettings =
+ ImmutableList.of(
+ SafetySetting.builder()
+ .category(HarmCategory.Known.HARM_CATEGORY_HATE_SPEECH)
+ .threshold(HarmBlockThreshold.Known.BLOCK_ONLY_HIGH)
+ .build(),
+ SafetySetting.builder()
+ .category(HarmCategory.Known.HARM_CATEGORY_DANGEROUS_CONTENT)
+ .threshold(HarmBlockThreshold.Known.BLOCK_LOW_AND_ABOVE)
+ .build());
+
+ // Sets the system instruction in the config.
+ Content systemInstruction = Content.fromParts(Part.fromText("You are a history teacher."));
+
+ // Sets the Google Search tool in the config.
+ Tool googleSearchTool = Tool.builder().googleSearch(GoogleSearch.builder()).build();
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ // Sets the thinking budget to 0 to disable thinking mode
+ .thinkingConfig(ThinkingConfig.builder().thinkingBudget(0))
+ .candidateCount(1)
+ .maxOutputTokens(1024)
+ .safetySettings(safetySettings)
+ .systemInstruction(systemInstruction)
+ .tools(googleSearchTool)
+ .build();
+
+ GenerateContentResponse response =
+ client.models.generateContent("gemini-2.5-flash", "Tell me the history of LLM", config);
System.out.println("Response: " + response.text());
}
@@ -281,20 +373,20 @@ public class GenerateContentWithFunctionCall {
public static void main(String[] args) throws NoSuchMethodException {
Client client = new Client();
+ // Load the method as a reflected Method object so that it can be
+ // automatically executed on the client side.
Method method =
GenerateContentWithFunctionCall.class.getMethod(
"getCurrentWeather", String.class, String.class);
GenerateContentConfig config =
GenerateContentConfig.builder()
- .tools(
- ImmutableList.of(
- Tool.builder().functions(ImmutableList.of(method)).build()))
+ .tools(Tool.builder().functions(method))
.build();
GenerateContentResponse response =
client.models.generateContent(
- "gemini-2.0-flash-001",
+ "gemini-2.5-flash",
"What is the weather in Vancouver?",
config);
@@ -306,7 +398,7 @@ public class GenerateContentWithFunctionCall {
}
```
-#### Stream Generated Content
+##### Stream Generated Content
To get a streamed response, you can use the `generateContentStream` method:
```java
@@ -318,13 +410,11 @@ import com.google.genai.types.GenerateContentResponse;
public class StreamGeneration {
public static void main(String[] args) {
- // Instantiate the client using Vertex API. The client gets the project and location from the
- // environment variables `GOOGLE_CLOUD_PROJECT` and `GOOGLE_CLOUD_LOCATION`.
- Client client = Client.builder().vertexAI(true).build();
+ Client client = new Client();
+ ResponseStream<GenerateContentResponse> responseStream =
client.models.generateContentStream(
- "gemini-2.0-flash-001", "Tell me a story in 300 words.", null);
+ "gemini-2.5-flash", "Tell me a story in 300 words.", null);
System.out.println("Streaming response: ");
for (GenerateContentResponse res : responseStream) {
@@ -338,7 +428,7 @@ public class StreamGeneration {
}
```
-#### Async Generate Content
+##### Async Generate Content
To get a response asynchronously, you can use the `generateContent` method from
the `client.async.models` namespace.
@@ -351,12 +441,11 @@ import java.util.concurrent.CompletableFuture;
public class GenerateContentAsync {
public static void main(String[] args) {
- // Instantiates the client using Gemini API, and sets the API key in the builder.
- Client client = Client.builder().apiKey("your-api-key").build();
+ Client client = new Client();
+ CompletableFuture<GenerateContentResponse> responseFuture =
client.async.models.generateContent(
- "gemini-2.0-flash-001", "Introduce Google AI Studio.", null);
+ "gemini-2.5-flash", "Introduce Google AI Studio.", null);
responseFuture
.thenAccept(
@@ -368,65 +457,7 @@ public class GenerateContentAsync {
}
```
-#### Generate Content with extra configs
-To set configurations like System Instructions and Safety Settings, you can pass
-a `GenerateContentConfig` to the `GenerateContent` method.
-
-```java
-package <your package name>;
-
-import com.google.common.collect.ImmutableList;
-import com.google.genai.Client;
-import com.google.genai.types.Content;
-import com.google.genai.types.GenerateContentConfig;
-import com.google.genai.types.GenerateContentResponse;
-import com.google.genai.types.GoogleSearch;
-import com.google.genai.types.HarmBlockThreshold;
-import com.google.genai.types.HarmCategory;
-import com.google.genai.types.Part;
-import com.google.genai.types.SafetySetting;
-import com.google.genai.types.Tool;
-
-public class GenerateContentWithConfigs {
- public static void main(String[] args) {
- Client client = new Client();
-
- // Sets the safety settings in the config.
- ImmutableList<SafetySetting> safetySettings =
- ImmutableList.of(
- SafetySetting.builder()
- .category(HarmCategory.Known.HARM_CATEGORY_HATE_SPEECH)
- .threshold(HarmBlockThreshold.Known.BLOCK_ONLY_HIGH)
- .build(),
- SafetySetting.builder()
- .category(HarmCategory.Known.HARM_CATEGORY_DANGEROUS_CONTENT)
- .threshold(HarmBlockThreshold.Known.BLOCK_LOW_AND_ABOVE)
- .build());
-
- // Sets the system instruction in the config.
- Content systemInstruction = Content.fromParts(Part.fromText("You are a history teacher."));
-
- // Sets the Google Search tool in the config.
- Tool googleSearchTool = Tool.builder().googleSearch(GoogleSearch.builder().build()).build();
-
- GenerateContentConfig config =
- GenerateContentConfig.builder()
- .candidateCount(1)
- .maxOutputTokens(1024)
- .safetySettings(safetySettings)
- .systemInstruction(systemInstruction)
- .tools(ImmutableList.of(googleSearchTool))
- .build();
-
- GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "Tell me the history of LLM", config);
-
- System.out.println("Response: " + response.text());
- }
-}
-```
-
-#### Generate Content with JSON response schema
+##### Generate Content with JSON response schema
To get a response in JSON by passing in a response schema to the
`GenerateContent` API.
@@ -445,6 +476,7 @@ public class GenerateContentWithSchema {
public static void main(String[] args) {
Client client = new Client();
+ // Define the schema for the response, in Json format.
+ ImmutableMap<String, Object> schema = ImmutableMap.of(
"type", "object",
"properties", ImmutableMap.of(
@@ -457,6 +489,7 @@ public class GenerateContentWithSchema {
"required", ImmutableList.of("recipe_name", "ingredients")
);
+ // Set the response schema in GenerateContentConfig
GenerateContentConfig config =
GenerateContentConfig.builder()
.responseMimeType("application/json")
@@ -465,13 +498,349 @@ public class GenerateContentWithSchema {
.build();
GenerateContentResponse response =
- client.models.generateContent("gemini-2.0-flash-001", "Tell me your name", config);
+ client.models.generateContent("gemini-2.5-flash", "Tell me your name", config);
System.out.println("Response: " + response.text());
}
}
```
+#### Count Tokens and Compute Tokens
+
+The `countTokens` method allows you to calculate the number of tokens your
+prompt will use before sending it to the model, helping you manage costs and
+stay within the context window.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.CountTokensResponse;
+
+public class CountTokens {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ CountTokensResponse response =
+ client.models.countTokens("gemini-2.5-flash", "What is your name?", null);
+
+ System.out.println("Count tokens response: " + response);
+ }
+}
+```
+
+The `computeTokens` method returns the Tokens Info that contains tokens and
+token IDs given your prompt. This method is only supported in Vertex AI.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.ComputeTokensResponse;
+
+public class ComputeTokens {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ ComputeTokensResponse response =
+ client.models.computeTokens("gemini-2.5-flash", "What is your name?", null);
+
+ System.out.println("Compute tokens response: " + response);
+ }
+}
+```
+
+#### Embed Content
+
+The `embedContent` method allows you to generate embeddings for words, phrases,
+sentences, and code. Note that only text embedding is supported in this method.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.EmbedContentResponse;
+
+public class EmbedContent {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ EmbedContentResponse response =
+ client.models.embedContent("gemini-embedding-001", "why is the sky blue?", null);
+
+ System.out.println("Embedding response: " + response);
+ }
+}
+```
+
+### Imagen
+
+Imagen is a text-to-image GenAI service.
+
+#### Generate Images
+
+The `generateImages` method helps you create high-quality, unique images given a
+text prompt.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateImagesConfig;
+import com.google.genai.types.GenerateImagesResponse;
+import com.google.genai.types.Image;
+
+public class GenerateImages {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ GenerateImagesConfig config =
+ GenerateImagesConfig.builder()
+ .numberOfImages(1)
+ .outputMimeType("image/jpeg")
+ .includeSafetyAttributes(true)
+ .build();
+
+ GenerateImagesResponse response =
+ client.models.generateImages(
+ "imagen-3.0-generate-002", "Robot holding a red skateboard", config);
+
+ response.generatedImages().ifPresent(
+ images -> {
+ System.out.println("Generated " + images.size() + " images.");
+ Image image = images.get(0).image().orElse(null);
+ // Do something with the image.
+ }
+ );
+ }
+}
+```
+
+#### Upscale Image
+
+The `upscaleImage` method allows you to upscale an image. This feature is only
+supported in Vertex AI.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.UpscaleImageConfig;
+import com.google.genai.types.UpscaleImageResponse;
+
+public class UpscaleImage {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ UpscaleImageConfig config =
+ UpscaleImageConfig.builder()
+ .outputMimeType("image/jpeg")
+ .enhanceInputImage(true)
+ .imagePreservationFactor(0.6f)
+ .build();
+
+ UpscaleImageResponse response =
+ client.models.upscaleImage("imagen-3.0-generate-002", image, "x2", config);
+
+ response.generatedImages().ifPresent(
+ images -> {
+ Image upscaledImage = images.get(0).image().orElse(null);
+ // Do something with the upscaled image.
+ }
+ );
+ }
+}
+```
+
+#### Edit Image
+
+The `editImage` method lets you edit an image. You can input reference images
+(ex. mask reference for inpainting, or style reference for style transfer) in
+addition to a text prompt to guide the editing.
+
+This feature uses a different model than `generateImages` and `upscaleImage`. It
+is only supported in Vertex AI.
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.EditImageConfig;
+import com.google.genai.types.EditImageResponse;
+import com.google.genai.types.EditMode;
+import com.google.genai.types.Image;
+import com.google.genai.types.MaskReferenceConfig;
+import com.google.genai.types.MaskReferenceImage;
+import com.google.genai.types.MaskReferenceMode;
+import com.google.genai.types.RawReferenceImage;
+import com.google.genai.types.ReferenceImage;
+import java.util.ArrayList;
+
+public class EditImage {
+ public static void main(String[] args) {
+ Client client = Client.builder().vertexAI(true).build();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ // Edit image with a mask.
+ EditImageConfig config =
+ EditImageConfig.builder()
+ .editMode(EditMode.Known.EDIT_MODE_INPAINT_INSERTION)
+ .numberOfImages(1)
+ .outputMimeType("image/jpeg")
+ .build();
+
+    ArrayList<ReferenceImage> referenceImages = new ArrayList<>();
+ RawReferenceImage rawReferenceImage =
+ RawReferenceImage.builder().referenceImage(image).referenceId(1).build();
+ referenceImages.add(rawReferenceImage);
+
+ MaskReferenceImage maskReferenceImage =
+ MaskReferenceImage.builder()
+ .referenceId(2)
+ .config(
+ MaskReferenceConfig.builder()
+ .maskMode(MaskReferenceMode.Known.MASK_MODE_BACKGROUND)
+ .maskDilation(0.0f))
+ .build();
+ referenceImages.add(maskReferenceImage);
+
+ EditImageResponse response =
+ client.models.editImage(
+ "imagen-3.0-capability-001", "Sunlight and clear sky", referenceImages, config);
+
+ response.generatedImages().ifPresent(
+ images -> {
+ Image editedImage = images.get(0).image().orElse(null);
+ // Do something with the edited image.
+ }
+ );
+ }
+}
+```
+
+### Veo
+
+Veo is a video generation GenAI service.
+
+#### Generate Videos (Text to Video)
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.Video;
+
+public class GenerateVideosWithText {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ GenerateVideosConfig config =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .enhancePrompt(true)
+ .durationSeconds(5)
+ .build();
+
+ // generateVideos returns an operation
+ GenerateVideosOperation operation =
+ client.models.generateVideos(
+ "veo-2.0-generate-001", "A neon hologram of a cat driving at top speed", null, config);
+
+    // While the operation has not finished, operation.done() is empty
+ while (!operation.done().isPresent()) {
+ try {
+ System.out.println("Waiting for operation to complete...");
+ Thread.sleep(10000);
+ // Sleep for 10 seconds and check the operation again
+ operation = client.operations.getVideosOperation(operation, null);
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ operation.response().ifPresent(
+ response -> {
+ response.generatedVideos().ifPresent(
+ videos -> {
+ System.out.println("Generated " + videos.size() + " videos.");
+ Video video = videos.get(0).video().orElse(null);
+ // Do something with the generated video
+ }
+ );
+ }
+ );
+ }
+}
+```
+
+#### Generate Videos (Image to Video)
+
+```java
+package <your package name>;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.Image;
+import com.google.genai.types.Video;
+
+public class GenerateVideosWithImage {
+ public static void main(String[] args) {
+ Client client = new Client();
+
+ Image image = Image.fromFile("path/to/your/image");
+
+ GenerateVideosConfig config =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .enhancePrompt(true)
+ .durationSeconds(5)
+ .build();
+
+ // generateVideos returns an operation
+ GenerateVideosOperation operation =
+ client.models.generateVideos(
+ "veo-2.0-generate-001",
+ "Night sky",
+ image,
+ config);
+
+    // While the operation has not finished, operation.done() is empty
+ while (!operation.done().isPresent()) {
+ try {
+ System.out.println("Waiting for operation to complete...");
+ Thread.sleep(10000);
+ // Sleep for 10 seconds and check the operation again
+ operation = client.operations.getVideosOperation(operation, null);
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ operation.response().ifPresent(
+ response -> {
+ response.generatedVideos().ifPresent(
+ videos -> {
+ System.out.println("Generated " + videos.size() + " videos.");
+ Video video = videos.get(0).video().orElse(null);
+ // Do something with the generated video
+ }
+ );
+ }
+ );
+ }
+}
+```
+
+
## Versioning
This library follows [Semantic Versioning](http://semver.org/).
diff --git a/examples/pom.xml b/examples/pom.xml
index aa3685b77c3..38ee1a3ecfc 100644
--- a/examples/pom.xml
+++ b/examples/pom.xml
@@ -5,7 +5,7 @@
com.google.genai.examplesgoogle-genai-examples
- 1.11.0-SNAPSHOT
+ 1.26.0-SNAPSHOTgoogle-genai-examples
@@ -13,7 +13,7 @@
1.81.8
- 1.11.0-SNAPSHOT
+ 1.26.0-SNAPSHOT
diff --git a/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java b/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java
new file mode 100644
index 00000000000..7e2eee4f361
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/BatchInlinedRequests.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.BatchInlinedRequests"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.genai.Client;
+import com.google.genai.types.BatchJob;
+import com.google.genai.types.BatchJobSource;
+import com.google.genai.types.Content;
+import com.google.genai.types.CreateBatchJobConfig;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.InlinedRequest;
+import com.google.genai.types.Part;
+
+/** An example of creating a batch job with inlined requests. */
+public final class BatchInlinedRequests {
+
+ public static void main(String[] args) {
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Inlined requests are not supported for Vertex AI backend.");
+ return;
+ } else {
+ System.out.println("Calling GeminiAPI Backend...");
+ }
+
+ InlinedRequest request1 =
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Tell me a one-sentence joke.")))
+ .config(
+ GenerateContentConfig.builder()
+ .systemInstruction(
+ Content.builder()
+ .parts(
+ Part.fromText(
+ "You are a funny comedian. Always respond with humor and"
+ + " wit.")))
+ .temperature(0.5f))
+ .build();
+
+ InlinedRequest request2 =
+ InlinedRequest.builder()
+ .contents(Content.builder().parts(Part.fromText("Why is the sky blue?")))
+ .config(
+ GenerateContentConfig.builder()
+ .systemInstruction(
+ Content.builder()
+ .parts(
+ Part.fromText(
+ "You are a helpful science teacher. Explain complex concepts in"
+ + " simple terms.")))
+ .temperature(0.5f))
+ .build();
+
+ BatchJobSource batchJobSource =
+ BatchJobSource.builder().inlinedRequests(ImmutableList.of(request1, request2)).build();
+
+ CreateBatchJobConfig config =
+ CreateBatchJobConfig.builder().displayName("inlined-requests-job-1").build();
+
+ BatchJob batchJob =
+ client.batches.create(Constants.GEMINI_MODEL_NAME, batchJobSource, config);
+
+ System.out.println("Created batch job: " + batchJob.name().get());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/BatchManagement.java b/examples/src/main/java/com/google/genai/examples/BatchManagement.java
index d0050df6712..f200a1ee6d6 100644
--- a/examples/src/main/java/com/google/genai/examples/BatchManagement.java
+++ b/examples/src/main/java/com/google/genai/examples/BatchManagement.java
@@ -56,6 +56,12 @@
public final class BatchManagement {
public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -68,10 +74,6 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- String modelId = "gemini-1.5-flash-002";
- if (args.length != 0) {
- modelId = args[0];
- }
System.out.println("Using Vertex AI");
// Create a batch job.
BatchJobSource batchJobSource =
@@ -97,10 +99,6 @@ public static void main(String[] args) {
System.out.println("Cancelled batch job: " + batchJob1.name().get());
} else {
System.out.println("Using Gemini Developer API");
- String modelId = "gemini-2.0-flash";
- if (args.length != 0) {
- modelId = args[0];
- }
// Create a batch job.
BatchJobSource batchJobSource =
BatchJobSource.builder()
diff --git a/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java b/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java
index c1fcca542c5..df00b9b3d15 100644
--- a/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/BatchManagementAsync.java
@@ -58,6 +58,12 @@
public final class BatchManagementAsync {
public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -70,10 +76,6 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- String modelId = "gemini-1.5-flash-002";
- if (args.length != 0) {
- modelId = args[0];
- }
System.out.println("Using Vertex AI");
// Create a batch job.
BatchJobSource batchJobSource =
@@ -123,10 +125,6 @@ public static void main(String[] args) {
System.out.println("All batch job operations completed.");
} else {
System.out.println("Using Gemini Developer API");
- String modelId = "gemini-2.0-flash";
- if (args.length != 0) {
- modelId = args[0];
- }
// Create a batch job.
BatchJobSource batchJobSource =
BatchJobSource.builder()
diff --git a/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java b/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
index bcd6d30b997..ad37e1ffe31 100644
--- a/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
+++ b/examples/src/main/java/com/google/genai/examples/CachedContentOperations.java
@@ -50,7 +50,6 @@
import com.google.genai.types.ListCachedContentsConfig;
import com.google.genai.types.Part;
import com.google.genai.types.UpdateCachedContentConfig;
-
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
@@ -63,9 +62,11 @@
public final class CachedContentOperations {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java b/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
index 223c927a64a..62f16eda543 100644
--- a/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/CachedContentOperationsAsync.java
@@ -66,11 +66,11 @@
public final class CachedContentOperationsAsync {
public static void main(String[] args) {
- String modelId;
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
} else {
- modelId = "gemini-2.0-flash-001";
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java b/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
index 584a4c1e76a..618d6acb360 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithFunctionCall.java
@@ -60,9 +60,11 @@ public static Integer divideTwoIntegers(int numerator, int denominator) {
}
public static void main(String[] args) throws NoSuchMethodException {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
index 83a2b4b8af0..f79b0a38db6 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistory.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to create a chat session with history. */
public final class ChatWithHistory {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
index aa9da312e55..24c5b17a71a 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsync.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to create an async chat session with history. */
public final class ChatWithHistoryAsync {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
index 89112f774fb..427c3840b80 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryAsyncStreaming.java
@@ -54,9 +54,11 @@
*/
public final class ChatWithHistoryAsyncStreaming {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
index c7c69e7c40b..54b9be2f62f 100644
--- a/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
+++ b/examples/src/main/java/com/google/genai/examples/ChatWithHistoryStreaming.java
@@ -52,9 +52,11 @@
*/
public final class ChatWithHistoryStreaming {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/ComputeTokens.java b/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
index 38b2d373026..e819816b19e 100644
--- a/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
+++ b/examples/src/main/java/com/google/genai/examples/ComputeTokens.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to compute tokens for simple text input. */
public final class ComputeTokens {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -64,7 +66,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
ComputeTokensResponse response =
diff --git a/examples/src/main/java/com/google/genai/examples/Constants.java b/examples/src/main/java/com/google/genai/examples/Constants.java
new file mode 100644
index 00000000000..ff7d0cc67a4
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/Constants.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.genai.examples;
+
+/** A final class to hold constants shared across all examples. */
+public final class Constants {
+
+ private Constants() {}
+
+ /** The name of the generative model to be used in the examples. */
+ public static final String GEMINI_MODEL_NAME = "gemini-2.5-flash";
+
+ /** The name of the live model to be used in the examples. */
+ public static final String GEMINI_LIVE_MODEL_NAME = "gemini-live-2.5-flash";
+
+ /** The name of the preview live model to be used in the examples. */
+ public static final String GEMINI_LIVE_MODEL_NAME_PREVIEW = "gemini-live-2.5-flash-preview";
+
+ /** The name of the image generation model to be used in the examples. */
+ public static final String GEMINI_IMAGE_GENERATION_MODEL_NAME =
+ "gemini-2.0-flash-preview-image-generation";
+
+ /** The name of the Imagen generate model to be used in the examples. */
+ public static final String IMAGEN_GENERATE_MODEL_NAME = "imagen-4.0-generate-001";
+
+ /** The name of the Imagen model to be used for image editing in the examples. */
+ public static final String IMAGEN_CAPABILITY_MODEL_NAME = "imagen-3.0-capability-001";
+
+ /** The name of the Imagen ingredients model to be used in the examples. */
+ public static final String IMAGEN_INGREDIENTS_MODEL_NAME = "imagen-4.0-ingredients-preview";
+
+ /** The name of the Imagen product recontext model to be used in the examples. */
+ public static final String IMAGEN_RECONTEXT_MODEL_NAME = "imagen-product-recontext-preview-06-30";
+
+ /** The name of the Virtual try-on model to be used in the examples. */
+ public static final String VIRTUAL_TRY_ON_MODEL_NAME = "virtual-try-on-preview-08-04";
+
+ /** The name of the segment image model to be used in the examples. */
+ public static final String SEGMENT_IMAGE_MODEL_NAME = "image-segmentation-001";
+
+ /** The name of the Veo model to be used in the examples. */
+ public static final String VEO_MODEL_NAME = "veo-2.0-generate-001";
+
+ /** The name of the embedding model to be used in the examples. */
+ public static final String EMBEDDING_MODEL_NAME = "text-embedding-004";
+
+ /** The file path to be used in the files operations examples. */
+ public static final String UPLOAD_FILE_PATH = "./resources/test.txt";
+}
diff --git a/examples/src/main/java/com/google/genai/examples/CountTokens.java b/examples/src/main/java/com/google/genai/examples/CountTokens.java
index 5a6cfa28bb5..1e80883700c 100644
--- a/examples/src/main/java/com/google/genai/examples/CountTokens.java
+++ b/examples/src/main/java/com/google/genai/examples/CountTokens.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to count tokens for simple text input. */
public final class CountTokens {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java b/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
index 770dbcbcbcd..1ac426d41c3 100644
--- a/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
+++ b/examples/src/main/java/com/google/genai/examples/CountTokensWithConfigs.java
@@ -54,9 +54,11 @@
*/
public final class CountTokensWithConfigs {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,7 +74,10 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println(
+ "Gemini Developer API is not supported for this example since system instruction is not"
+ + " supported.");
+ System.exit(0);
}
// Sets the system instruction in the config.
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageAsync.java b/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
index c1d9a0f6168..0761645158d 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageAsync.java
@@ -56,9 +56,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image asynchronously. */
public final class EditImageAsync {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -74,7 +76,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java b/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java
new file mode 100644
index 00000000000..2a66cfdc520
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/EditImageContentReference.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.EditImageContentReference"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.ContentReferenceImage;
+import com.google.genai.types.EditImageConfig;
+import com.google.genai.types.EditImageResponse;
+import com.google.genai.types.Image;
+import com.google.genai.types.ReferenceImage;
+import com.google.genai.types.StyleReferenceConfig;
+import com.google.genai.types.StyleReferenceImage;
+import java.util.ArrayList;
+
+/** An example of using the Unified Gen AI Java SDK to edit an image (Mask reference). */
+public final class EditImageContentReference {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_INGREDIENTS_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ EditImageConfig editImageConfig =
+ EditImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+    ArrayList<ReferenceImage> referenceImages = new ArrayList<>();
+ Image dogImage = Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/dog.jpg").build();
+ ContentReferenceImage contentReferenceImage =
+ ContentReferenceImage.builder().referenceImage(dogImage).referenceId(1).build();
+ referenceImages.add(contentReferenceImage);
+
+ Image cyberpunkImage =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/cyberpunk.jpg").build();
+ StyleReferenceImage styleReferenceImage =
+ StyleReferenceImage.builder()
+ .referenceId(2)
+ .referenceImage(cyberpunkImage)
+ .config(StyleReferenceConfig.builder().styleDescription("cyberpunk style").build())
+ .build();
+ referenceImages.add(styleReferenceImage);
+
+ EditImageResponse editImageResponse =
+ client.models.editImage(
+ modelId,
+ "Dog in [1] sleeping on the ground at the bottom of the image with the cyberpunk city"
+ + " landscape in [2] in the background visible on the side of the mug.",
+ referenceImages,
+ editImageConfig);
+
+ Image editedImage = editImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with editedImage.
+ }
+
+ private EditImageContentReference() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java b/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
index 7955580d4c4..81842b18500 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageControlReference.java
@@ -55,9 +55,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Control reference). */
public final class EditImageControlReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -73,7 +75,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java b/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
index 3b832a4ff60..e0044276578 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageMaskReference.java
@@ -57,9 +57,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Mask reference). */
public final class EditImageMaskReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -75,7 +77,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java b/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
index 021e5702495..0e69cf8754b 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageStyleTransfer.java
@@ -54,9 +54,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Style transfer). */
public final class EditImageStyleTransfer {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,7 +74,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java b/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
index 897dcc85635..0ba00a64f2f 100644
--- a/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
+++ b/examples/src/main/java/com/google/genai/examples/EditImageSubjectReference.java
@@ -55,9 +55,11 @@
/** An example of using the Unified Gen AI Java SDK to edit an image (Subject reference). */
public final class EditImageSubjectReference {
public static void main(String[] args) {
- String modelId = "imagen-3.0-capability-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -73,7 +75,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContent.java b/examples/src/main/java/com/google/genai/examples/EmbedContent.java
index 2886f90929a..77de9bc09aa 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContent.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContent.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to embed content. */
public final class EmbedContent {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java b/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
index 538f017c19a..731ec561a06 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContentAsync.java
@@ -47,9 +47,11 @@
/** An example of using the Unified Gen AI Java SDK to embed content asynchronously. */
public final class EmbedContentAsync {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java b/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
index 1ffbd18f123..09770d6512d 100644
--- a/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
+++ b/examples/src/main/java/com/google/genai/examples/EmbedContentWithConfig.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to embed content with extra config. */
public final class EmbedContentWithConfig {
public static void main(String[] args) {
- String modelId = "text-embedding-004";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.EMBEDDING_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/FileOperations.java b/examples/src/main/java/com/google/genai/examples/FileOperations.java
index 3dc7eae5771..0a54cb0b1a4 100644
--- a/examples/src/main/java/com/google/genai/examples/FileOperations.java
+++ b/examples/src/main/java/com/google/genai/examples/FileOperations.java
@@ -38,7 +38,7 @@
*
mvn clean compile
*
*
mvn exec:java -Dexec.mainClass="com.google.genai.examples.FileOperations"
- * -Dexec.args="./resources/test.txt"
+ * -Dexec.args="path/to/file"
*/
package com.google.genai.examples;
@@ -52,12 +52,12 @@
/** An example of how to use the Files module to upload, retrieve, and delete files. */
public final class FileOperations {
public static void main(String[] args) {
-
- if (args.length == 0) {
- System.out.println("Please provide a file path on the -Dexec.args argument.");
- return;
+ final String filePath;
+ if (args.length != 0) {
+ filePath = args[0];
+ } else {
+ filePath = Constants.UPLOAD_FILE_PATH;
}
- String filePath = args[0];
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -70,7 +70,8 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- System.out.println("Using Vertex AI");
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
} else {
System.out.println("Using Gemini Developer API");
}
diff --git a/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java b/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
index 6fc53aba435..f35c7ed057a 100644
--- a/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/FileOperationsAsync.java
@@ -56,12 +56,12 @@
*/
public final class FileOperationsAsync {
public static void main(String[] args) {
-
- if (args.length == 0) {
- System.out.println("Please provide a file path on the -Dexec.args argument.");
- return;
+ final String filePath;
+ if (args.length != 0) {
+ filePath = args[0];
+ } else {
+ filePath = Constants.UPLOAD_FILE_PATH;
}
- String filePath = args[0];
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
@@ -74,7 +74,8 @@ public static void main(String[] args) {
Client client = new Client();
if (client.vertexAI()) {
- System.out.println("Using Vertex AI");
+ System.out.println("Vertex AI API is not supported for this example.");
+ System.exit(0);
} else {
System.out.println("Using Gemini Developer API");
}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContent.java b/examples/src/main/java/com/google/genai/examples/GenerateContent.java
index 2fd163e0155..f89c986fd14 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContent.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContent.java
@@ -46,9 +46,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content. */
public final class GenerateContent {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
index 7f180d9197e..655c99ce3f8 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentAsync.java
@@ -47,9 +47,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content asynchronously. */
public final class GenerateContentAsync {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java b/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
index da242a2924a..df8c1ff72c7 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentStream.java
@@ -47,9 +47,11 @@
/** An example of using the Unified GenAI Java SDK to generate stream of content. */
public final class GenerateContentStream {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
index 38833c0de22..770bbeb5751 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithConfigs.java
@@ -57,9 +57,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with extra configs. */
public final class GenerateContentWithConfigs {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
index dc6d4b3e7d4..de4514766a4 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCall.java
@@ -56,14 +56,16 @@ public static String getCurrentWeather(String location, String unit) {
}
/** A callable function to divide two integers. */
- public static Integer divideTwoIntegers(Integer numerator, Integer denominator) {
+ public static Integer divideTwoIntegers(int numerator, int denominator) {
return numerator / denominator;
}
public static void main(String[] args) throws NoSuchMethodException {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -82,12 +84,13 @@ public static void main(String[] args) throws NoSuchMethodException {
System.out.println("Using Gemini Developer API");
}
+ // Load the two methods as reflected Method objects so that they can be automatically executed
+ // on the client side.
Method method1 =
GenerateContentWithFunctionCall.class.getMethod(
"getCurrentWeather", String.class, String.class);
Method method2 =
- GenerateContentWithFunctionCall.class.getMethod(
- "divideTwoIntegers", Integer.class, Integer.class);
+ GenerateContentWithFunctionCall.class.getMethod("divideTwoIntegers", int.class, int.class);
// Add the two methods as callable functions to the list of tools.
GenerateContentConfig config =
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java
new file mode 100644
index 00000000000..66f87fcfae2
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallAsync.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithFunctionCallAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+import java.lang.reflect.Method;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to generate content with (automatic) function
+ * calling asynchronously.
+ */
+public final class GenerateContentWithFunctionCallAsync {
+ /** A callable function to get the weather. */
+ public static String getCurrentWeather(String location, String unit) {
+ return "The weather in " + location + " is " + "very nice.";
+ }
+
+ /** A callable function to divide two integers. */
+ public static Integer divideTwoIntegers(int numerator, int denominator) {
+ return numerator / denominator;
+ }
+
+ public static void main(String[] args) throws NoSuchMethodException, InterruptedException {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Load the two methods as reflected Method objects so that they can be automatically executed
+ // on the client side.
+ Method method1 =
+ GenerateContentWithFunctionCall.class.getMethod(
+ "getCurrentWeather", String.class, String.class);
+ Method method2 =
+ GenerateContentWithFunctionCall.class.getMethod("divideTwoIntegers", int.class, int.class);
+
+ // Add the two methods as callable functions to the list of tools.
+ GenerateContentConfig config =
+ GenerateContentConfig.builder().tools(Tool.builder().functions(method1, method2)).build();
+
+ // --- Asynchronous Call ---
+ CompletableFuture future =
+ client.async.models.generateContent(
+ modelId, "What is the weather in Vancouver? And can you divide 10 by 0?", config);
+
+ try {
+ GenerateContentResponse response = future.get();
+
+ System.out.println("The response is: " + response.text());
+ System.out.println(
+ "The automatic function calling history is: "
+ + response.automaticFunctionCallingHistory().get());
+
+ } catch (ExecutionException e) {
+ // This shows how to handle errors in the async call.
+ System.err.println("Error during execution: " + e.getCause());
+ }
+ }
+
+ private GenerateContentWithFunctionCallAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java
index a162b920fdf..6dd91f1c7bf 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJson.java
@@ -45,19 +45,20 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.genai.Client;
+import com.google.genai.types.FunctionDeclaration;
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.Tool;
-import com.google.genai.types.FunctionDeclaration;
-
/** An example of using the Unified Gen AI Java SDK to generate content with function calling. */
public final class GenerateContentWithFunctionCallJson {
/** A callable function to get the weather. */
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -76,28 +77,34 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- ImmutableMap schema =
+ // Define the schema for the function declaration, in Json format.
+ ImmutableMap parametersSchema =
ImmutableMap.of(
"type", "object",
"properties", ImmutableMap.of("location", ImmutableMap.of("type", "string")),
"required", ImmutableList.of("location"));
+ ImmutableMap responseSchema =
+ ImmutableMap.of(
+ "type", "object",
+ "properties", ImmutableMap.of("weather", ImmutableMap.of("type", "string")),
+ "required", ImmutableList.of("weather"));
+
+ // Define the tool with the function declaration.
Tool toolWithFunctionDeclarations =
Tool.builder()
.functionDeclarations(
- ImmutableList.of(
- FunctionDeclaration.builder()
- .name("get_weather")
- .description("Returns the weather in a given location.")
- .parametersJsonSchema(schema)
- .build()))
+ FunctionDeclaration.builder()
+ .name("get_weather")
+ .description("Returns the weather in a given location.")
+ .parametersJsonSchema(parametersSchema)
+ .responseJsonSchema(responseSchema)
+ .build())
.build();
- // Add the two methods as callable functions to the list of tools.
+ // Add the tool to the GenerateContentConfig.
GenerateContentConfig config =
- GenerateContentConfig.builder()
- .tools(ImmutableList.of(toolWithFunctionDeclarations))
- .build();
+ GenerateContentConfig.builder().tools(toolWithFunctionDeclarations).build();
GenerateContentResponse response =
client.models.generateContent(modelId, "What is the weather in Vancouver?", config);
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java
new file mode 100644
index 00000000000..774fc540039
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithFunctionCallJsonString.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithFunctionCallJsonString"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.types.FunctionDeclaration;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.Tool;
+
+/** An example of using the Unified Gen AI Java SDK to generate content with function calling. */
+public final class GenerateContentWithFunctionCallJsonString {
+ /** A callable function to get the weather. */
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Define the schema for the function declaration, in Json format. Note if you have java 15 or
+ // above, you can use the following string block instead:
+ // String parametersSchemaString =
+ // """{
+ // "type": "object",
+ // "properties": {
+ // "location": {
+ // "type": "string"
+ // }
+ // },
+ // "required": [
+ // "location"
+ // ]
+ // }""";
+ // String responseSchemaString =
+ // """{
+ // "type": "object",
+ // "properties": {
+ // "weather": {
+ // "type": "string"
+ // }
+ // },
+ // "required": [
+ // "weather"
+ // ]
+ // }""";
+ String parametersSchemaString =
+ "{\"type\":\"object\",\"properties\":{\"location\":{\"type\":\"string\"}},\"required\":[\"location\"]}";
+ String responseSchemaString =
+ "{\"type\":\"object\",\"properties\":{\"weather\":{\"type\":\"string\"}},\"required\":[\"weather\"]}";
+
+ // Define the tool with the function declaration.
+ Tool toolWithFunctionDeclarations =
+ Tool.builder()
+ .functionDeclarations(
+ FunctionDeclaration.builder()
+ .name("get_weather")
+ .description("Returns the weather in a given location.")
+ .parametersJsonSchema(JsonSerializable.stringToJsonNode(parametersSchemaString))
+ .responseJsonSchema(JsonSerializable.stringToJsonNode(responseSchemaString))
+ .build())
+ .build();
+
+ // Add the tool to the GenerateContentConfig.
+ GenerateContentConfig config =
+ GenerateContentConfig.builder().tools(toolWithFunctionDeclarations).build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "What is the weather in Vancouver?", config);
+
+ System.out.println("The response is: " + response.functionCalls());
+ }
+
+ private GenerateContentWithFunctionCallJsonString() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java
new file mode 100644
index 00000000000..b07847e1c3e
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithHttpOptions.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ *
export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ *
2. Compile the java package and run the sample code.
+ *
+ *
mvn clean compile
+ *
+ *
mvn exec:java -Dexec.mainClass="com.google.genai.examples.GenerateContentWithHttpOptions"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
+
+/** An example of setting http options in a GenerateContent request. */
+public final class GenerateContentWithHttpOptions {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Set the client level http options when creating the client. All the API requests will share
+ // the same http options.
+ HttpOptions httpOptions =
+ HttpOptions.builder()
+ .apiVersion("v1")
+ .timeout(5000)
+ .retryOptions(HttpRetryOptions.builder().attempts(3).httpStatusCodes(408, 429, 504))
+ .build();
+
+ Client client = Client.builder().httpOptions(httpOptions).build();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "Tell me the history of LLM in 100 words", null);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithHttpOptions() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
index 997737cb60d..316dbb1ccbd 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithImageInput.java
@@ -50,9 +50,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with image input. */
public final class GenerateContentWithImageInput {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -68,9 +70,10 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- throw new IllegalArgumentException(
+ System.out.println(
"This example is not supported for Gemini Developer API since the image uri from GCS is"
+ " only supported in Vertex AI.");
+ System.exit(0);
}
Content content =
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java
index bd3882be126..b6ec9707d3f 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchema.java
@@ -37,7 +37,8 @@
*
*
mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithResponseJsonSchema"
* -Dexec.args="YOUR_MODEL_ID"
*/
package com.google.genai.examples;
@@ -54,9 +55,11 @@
*/
public final class GenerateContentWithResponseJsonSchema {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java
new file mode 100644
index 00000000000..8cb4e6a4d5b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseJsonSchemaString.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ *
1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ *
Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ *
1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateContentWithResponseJsonSchemaString"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.JsonSerializable;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+
+/**
+ * GenerateContentWithResponseJsonSchema generates a content and returns a json object by passing a
+ * schema.
+ */
+public final class GenerateContentWithResponseJsonSchemaString {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+ // Note if you have java 15 or above, you can use the following string block instead:
+ // String schema = """{
+ // "type": "object",
+ // "properties": {
+ // "recipe_name": {
+ // "type": "string"
+ // },
+ // "ingredients": {
+ // "type": "array",
+ // "items": {
+ // "type": "string"
+ // }
+ // }
+ // },
+ // "required": [
+ // "recipe_name",
+ // "ingredients"
+ // ]
+ // }""";
+ String schema =
+ "{\"type\":\"object\",\"properties\":{\"recipe_name\":{\"type\":\"string\"},\"ingredients\":{\"type\":\"array\",\"items\":{\"type\":\"string\"}}},\"required\":[\"recipe_name\",\"ingredients\"]}";
+
+ GenerateContentConfig config =
+ GenerateContentConfig.builder()
+ .responseMimeType("application/json")
+ .candidateCount(1)
+ .responseJsonSchema(JsonSerializable.stringToJsonNode(schema))
+ .build();
+
+ GenerateContentResponse response =
+ client.models.generateContent(modelId, "List a few popular cookie recipes.", config);
+
+ System.out.println("Response: " + response.text());
+ }
+
+ private GenerateContentWithResponseJsonSchemaString() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
index c6422966202..55c5e1d08c3 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseModality.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to generate content with response modality. */
public final class GenerateContentWithResponseModality {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-preview-image-generation";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_IMAGE_GENERATION_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
index ce9e53e41e4..ea44ba2c6d2 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateContentWithResponseSchema.java
@@ -55,9 +55,11 @@
*/
public final class GenerateContentWithResponseSchema {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateImages.java b/examples/src/main/java/com/google/genai/examples/GenerateImages.java
index c146985827e..45ed006c24f 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateImages.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateImages.java
@@ -48,9 +48,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images. */
public final class GenerateImages {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-002";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_GENERATE_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
index b9ceb3bcc43..2556b5e472a 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateImagesAsync.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images asynchronously. */
public final class GenerateImagesAsync {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-002";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_GENERATE_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideos.java b/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
index 690ee862abd..85a793364bf 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideos.java
@@ -44,14 +44,17 @@
import com.google.genai.errors.GenAiIOException;
import com.google.genai.types.GenerateVideosConfig;
import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
import com.google.genai.types.Video;
/** An example of using the Unified Gen AI Java SDK to generate videos. */
public final class GenerateVideos {
public static void main(String[] args) {
- String modelId = "veo-2.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.VEO_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -77,17 +80,19 @@ public static void main(String[] args) {
generateVideosConfigBuilder.outputGcsUri("gs://genai-sdk-tests/tmp/videos");
}
GenerateVideosConfig generateVideosConfig = generateVideosConfigBuilder.build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
GenerateVideosOperation generateVideosOperation =
- client.models.generateVideos(
- modelId, "A neon hologram of a cat driving at top speed", null, generateVideosConfig);
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
// GenerateVideosOperation.done() is empty if the operation is not done.
while (!generateVideosOperation.done().filter(Boolean::booleanValue).isPresent()) {
try {
Thread.sleep(10000); // Sleep for 10 seconds.
- generateVideosOperation =
- client.operations.getVideosOperation(generateVideosOperation, null);
+ generateVideosOperation = client.operations.get(generateVideosOperation, null);
System.out.println("Waiting for operation to complete...");
} catch (InterruptedException e) {
System.out.println("Thread was interrupted while sleeping.");
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
index 5f97af5c0d3..29b0a96be49 100644
--- a/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosAsync.java
@@ -43,6 +43,7 @@
import com.google.genai.Client;
import com.google.genai.types.GenerateVideosConfig;
import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
import com.google.genai.types.Video;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
@@ -50,9 +51,11 @@
/** An example of using the Unified Gen AI Java SDK to generate images asynchronously. */
public final class GenerateVideosAsync {
public static void main(String[] args) {
- String modelId = "veo-2.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.VEO_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -78,10 +81,13 @@ public static void main(String[] args) {
generateVideosConfigBuilder.outputGcsUri("gs://genai-sdk-tests/tmp/videos");
}
GenerateVideosConfig generateVideosConfig = generateVideosConfigBuilder.build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
CompletableFuture generateVideosOperationFuture =
- client.async.models.generateVideos(
- modelId, "A neon hologram of a cat driving at top speed", null, generateVideosConfig);
+ client.async.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
generateVideosOperationFuture
.thenAccept(
@@ -92,7 +98,7 @@ public static void main(String[] args) {
try {
Thread.sleep(10000); // Sleep for 10 seconds.
try {
- operation = client.async.operations.getVideosOperation(operation, null).get();
+ operation = client.async.operations.get(operation, null).get();
} catch (ExecutionException e) {
throw new RuntimeException(e);
}
@@ -109,7 +115,7 @@ public static void main(String[] args) {
Video generatedVideo =
operation.response().get().generatedVideos().get().get(0).video().get();
- // Do something with the video.
+ System.out.println("Video URL: " + generatedVideo.uri().get());
})
.join();
}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java
new file mode 100644
index 00000000000..3acfafc6e2f
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosEditOutpaint.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java
+ * -Dexec.mainClass="com.google.genai.examples.GenerateVideosEditOutpaint"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
+import com.google.genai.types.Image;
+import com.google.genai.types.Video;
+import com.google.genai.types.VideoGenerationMask;
+import com.google.genai.types.VideoGenerationMaskMode;
+
+/** An example of using the Unified Gen AI Java SDK to edit a video with outpaint mode. */
+public final class GenerateVideosEditOutpaint {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = "veo-2.0-generate-exp"; // Only supported on experimental model currently.
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ VideoGenerationMask videoGenerationMask =
+ VideoGenerationMask.builder()
+ .image(
+ Image.builder()
+ .gcsUri("gs://genai-sdk-tests/inputs/videos/video_outpaint_mask.png")
+ .mimeType("image/png")
+ .build())
+ .maskMode(VideoGenerationMaskMode.Known.OUTPAINT)
+ .build();
+
+ GenerateVideosConfig generateVideosConfig =
+ GenerateVideosConfig.builder()
+ .numberOfVideos(1)
+ .outputGcsUri("gs://genai-sdk-tests/tmp/videos")
+ .aspectRatio("16:9")
+ .mask(videoGenerationMask)
+ .build();
+
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .video(
+ Video.builder()
+ .uri("gs://genai-sdk-tests/inputs/videos/editing_demo.mp4")
+ .mimeType("video/mp4")
+ .build())
+ .build();
+
+ GenerateVideosOperation generateVideosOperation =
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation =
+ client.operations.getVideosOperation(generateVideosOperation, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo =
+ generateVideosOperation.response().get().generatedVideos().get().get(0).video().get();
+ }
+
+ private GenerateVideosEditOutpaint() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java b/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java
new file mode 100644
index 00000000000..ca1befb012f
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/GenerateVideosExtension.java
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.GenerateVideos"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.errors.GenAiIOException;
+import com.google.genai.types.GenerateVideosConfig;
+import com.google.genai.types.GenerateVideosOperation;
+import com.google.genai.types.GenerateVideosSource;
+import com.google.genai.types.Video;
+
+/** An example of using the Unified Gen AI Java SDK to generate videos. */
+public final class GenerateVideosExtension {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = "veo-3-exp";
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Sample is only available for Gemini Developer API.");
+ return;
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ // Generate first video.
+ GenerateVideosConfig generateVideosConfig =
+ GenerateVideosConfig.builder().numberOfVideos(1).build();
+ GenerateVideosSource generateVideosSource =
+ GenerateVideosSource.builder()
+ .prompt("A neon hologram of a cat driving at top speed")
+ .build();
+
+ GenerateVideosOperation generateVideosOperation1 =
+ client.models.generateVideos(modelId, generateVideosSource, generateVideosConfig);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation1.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation1 =
+ client.operations.getVideosOperation(generateVideosOperation1, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation1.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo1 =
+ generateVideosOperation1.response().get().generatedVideos().get().get(0).video().get();
+
+ if (!client.vertexAI()) {
+ try {
+ client.files.download(generatedVideo1, "video.mp4", null);
+ System.out.println("Downloaded video to video.mp4");
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while downloading the video: " + e.getMessage());
+ }
+ }
+
+ // Extend the generated video.
+ GenerateVideosConfig generateVideosConfig2 =
+ GenerateVideosConfig.builder().numberOfVideos(1).build();
+ GenerateVideosSource generateVideosSource2 =
+ GenerateVideosSource.builder().prompt("Rain").video(generatedVideo1).build();
+
+ GenerateVideosOperation generateVideosOperation2 =
+ client.models.generateVideos(modelId, generateVideosSource2, generateVideosConfig2);
+
+ // GenerateVideosOperation.done() is empty if the operation is not done.
+ while (!generateVideosOperation2.done().filter(Boolean::booleanValue).isPresent()) {
+ try {
+ Thread.sleep(10000); // Sleep for 10 seconds.
+ generateVideosOperation2 =
+ client.operations.getVideosOperation(generateVideosOperation2, null);
+ System.out.println("Waiting for operation to complete...");
+ } catch (InterruptedException e) {
+ System.out.println("Thread was interrupted while sleeping.");
+ Thread.currentThread().interrupt();
+ }
+ }
+ System.out.println(
+ "Generated "
+ + generateVideosOperation2.response().get().generatedVideos().get().size()
+ + " video(s).");
+
+ Video generatedVideo2 =
+ generateVideosOperation2.response().get().generatedVideos().get().get(0).video().get();
+
+ if (!client.vertexAI()) {
+ try {
+ client.files.download(generatedVideo2, "video.mp4", null);
+ System.out.println("Downloaded extended video to video.mp4");
+ } catch (GenAiIOException e) {
+ System.out.println("An error occurred while downloading the video: " + e.getMessage());
+ }
+ }
+ }
+
+ private GenerateVideosExtension() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java b/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java
new file mode 100644
index 00000000000..c4fecf6ba01
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/HttpOptionsExtraBody.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.HttpOptionsExtraBody"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.genai.Client;
+import com.google.genai.types.GenerateContentConfig;
+import com.google.genai.types.GenerateContentResponse;
+import com.google.genai.types.HttpOptions;
+
+/**
+ * An example of using HttpOption extraBody to inject additional parameters to http request body.
+ */
+public final class HttpOptionsExtraBody {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API.
+ // It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used
+ // by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as
+ // well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or
+ // Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not
+ // available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Using Gemini Developer API");
+ }
+
+ GenerateContentResponse response =
+ client.models.generateContent(
+ modelId,
+ "What is your name?",
+ GenerateContentConfig.builder()
+ .httpOptions(
+ HttpOptions.builder()
+ .extraBody(
+ ImmutableMap.of(
+ "systemInstruction",
+ ImmutableMap.of(
+ "parts",
+ ImmutableList.of(
+ ImmutableMap.of("text", "You are a chatbot.")))))
+ .build())
+ .build());
+
+ System.out.println(
+ "GenerateContent prompt token count: " + response.usageMetadata().get().promptTokenCount());
+ }
+
+ private HttpOptionsExtraBody() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
index c127d81125e..e1a6c82e0a9 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveAudioConversationAsync.java
@@ -50,12 +50,16 @@
import com.google.genai.AsyncSession;
import com.google.genai.Client;
import com.google.genai.types.Blob;
+import com.google.genai.types.AutomaticActivityDetection;
+import com.google.genai.types.EndSensitivity;
import com.google.genai.types.LiveConnectConfig;
import com.google.genai.types.LiveSendRealtimeInputParameters;
import com.google.genai.types.LiveServerMessage;
import com.google.genai.types.Modality;
+import com.google.genai.types.RealtimeInputConfig;
import com.google.genai.types.PrebuiltVoiceConfig;
import com.google.genai.types.SpeechConfig;
+import com.google.genai.types.StartSensitivity;
import com.google.genai.types.VoiceConfig;
import java.util.Collection;
import java.util.Optional;
@@ -86,7 +90,6 @@ public final class LiveAudioConversationAsync {
// --------------------------
private static volatile boolean running = true;
- private static volatile boolean speakerPlaying = false;
private static TargetDataLine microphoneLine;
private static SourceDataLine speakerLine;
private static AsyncSession session;
@@ -113,8 +116,7 @@ private static void sendMicrophoneAudio() {
while (running && microphoneLine != null && microphoneLine.isOpen()) {
bytesRead = microphoneLine.read(buffer, 0, buffer.length);
- if (bytesRead > 0 && !speakerPlaying) {
- // Create a copy of the buffer with the actual bytes read
+ if (bytesRead > 0) {
byte[] audioChunk = new byte[bytesRead];
System.arraycopy(buffer, 0, audioChunk, 0, bytesRead);
@@ -153,14 +155,13 @@ public static void main(String[] args) throws LineUnavailableException {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// --- Audio Line Setup ---
@@ -180,6 +181,14 @@ public static void main(String[] args) throws LineUnavailableException {
.prebuiltVoiceConfig(
PrebuiltVoiceConfig.builder().voiceName(voiceName)))
.languageCode("en-US"))
+ .realtimeInputConfig(
+ RealtimeInputConfig.builder()
+ .automaticActivityDetection(
+ AutomaticActivityDetection.builder()
+ .startOfSpeechSensitivity(StartSensitivity.Known.START_SENSITIVITY_HIGH)
+ .endOfSpeechSensitivity(EndSensitivity.Known.END_SENSITIVITY_HIGH)
+ .prefixPaddingMs(5)
+ .silenceDurationMs(100)))
.build();
// --- Shutdown Hook for Cleanup ---
@@ -302,25 +311,35 @@ public static void handleAudioResponse(LiveServerMessage message) {
.serverContent()
.ifPresent(
content -> {
+ // Handle interruptions from Gemini.
+ if (content.interrupted().orElse(false)) {
+ speakerLine.flush();
+ return; // Skip processing the rest of this message's audio.
+ }
+
+ // Handle Model turn completion.
if (content.turnComplete().orElse(false)) {
- // When interrupted, Gemini sends a turn_complete.
- // Stop the speaker if the turn is complete.
- if (speakerLine != null && speakerLine.isOpen()) {
- speakerLine.flush();
- }
- } else {
- content.modelTurn().stream()
- .flatMap(modelTurn -> modelTurn.parts().stream())
- .flatMap(Collection::stream)
- .map(part -> part.inlineData().flatMap(Blob::data))
- .flatMap(Optional::stream)
- .forEach(
- audioBytes -> {
- if (speakerLine != null && speakerLine.isOpen()) {
- // Write audio data to the speaker
- speakerLine.write(audioBytes, 0, audioBytes.length);
- }
- });
+ // The turn is over, no more audio will be sent for this turn.
+ return;
+ }
+
+ // Process audio content for playback.
+ content.modelTurn().stream()
+ .flatMap(modelTurn -> modelTurn.parts().stream())
+ .flatMap(Collection::stream)
+ .map(part -> part.inlineData().flatMap(Blob::data))
+ .flatMap(Optional::stream)
+ .forEach(
+ audioBytes -> {
+ if (speakerLine != null && speakerLine.isOpen()) {
+ // Write audio data to the speaker
+ speakerLine.write(audioBytes, 0, audioBytes.length);
+ }
+ });
+
+ // If this is the last message of a generation, drain the buffer.
+ if (content.generationComplete().orElse(false)) {
+ speakerLine.drain();
}
});
}
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
index aa4b7e636dd..526e4250a8e 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextContextWindowCompressionAsync.java
@@ -77,14 +77,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// Configures live session and context window compression.
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
index c0cef47f12e..de2f868c277 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextConversationAsync.java
@@ -75,14 +75,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
LiveConnectConfig config =
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
index ea18e0eeeac..25bbc8cb5a2 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextConversationResumptionAsync.java
@@ -70,11 +70,9 @@
public final class LiveTextConversationResumptionAsync {
public static void main(String[] args) {
- boolean containsModelId = false;
// Get the session handle from the command line, if provided
String sessionHandle = null;
if (args.length > 1) {
- containsModelId = true;
if (args[1].startsWith("--session_handle")) {
String[] parts = args[1].split("=", 2);
if (parts.length == 2) {
@@ -95,8 +93,6 @@ public static void main(String[] args) {
System.err.println("Usage: mvn ... --session_handle=");
System.exit(1);
}
- } else {
- containsModelId = true;
}
}
@@ -116,14 +112,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
- if (containsModelId) {
+ final String modelId;
+ if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
SessionResumptionConfig.Builder sessionResumptionConfigBuilder =
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
index 5690819f880..b6cd68d3989 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextToAudioTranscriptionAsync.java
@@ -80,14 +80,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
// Sets the system instruction in the config.
diff --git a/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java b/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
index 8e7541e9c04..92c1a11fd0f 100644
--- a/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/LiveTextToTextGenerationAsync.java
@@ -73,14 +73,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- String modelId;
- if (client.vertexAI()) {
- modelId = "gemini-2.0-flash-live-preview-04-09";
- } else {
- modelId = "gemini-live-2.5-flash-preview";
- }
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else if (client.vertexAI()) {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME;
+ } else {
+ modelId = Constants.GEMINI_LIVE_MODEL_NAME_PREVIEW;
}
LiveConnectConfig config =
@@ -94,6 +93,8 @@ public static void main(String[] args) {
.thenCompose(
session -> {
String inputText = "Write a short poem about a cat.";
+ System.out.println("Connecting to live session...");
+ System.out.println(session.sessionId());
System.out.println("\n**Input**\n" + inputText);
return session
@@ -133,6 +134,8 @@ public static void printLiveServerMessage(
// Check if the server's turn is complete and signal the allDone future if so.
if (message.serverContent().flatMap(LiveServerContent::turnComplete).orElse(false)) {
+ System.out.println("\n**End of turn, full message: **\n");
+ System.out.println(message);
System.out.println();
allDone.complete(null);
}
diff --git a/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java b/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java
new file mode 100644
index 00000000000..d142e31af94
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/LocalComputeTokens.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.LocalComputeTokens"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.LocalTokenizer;
+
+/** An example of using the Unified Gen AI Java SDK to compute tokens locally. */
+public class LocalComputeTokens {
+ public static void main(String[] args) {
+ LocalTokenizer tokenizer = new LocalTokenizer(Constants.GEMINI_MODEL_NAME);
+ System.out.println(
+ "Compute tokens for 'Hello world': " + tokenizer.computeTokens("Hello world").toJson());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java b/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java
new file mode 100644
index 00000000000..9e1c0fc425b
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/LocalCountTokens.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile exec:java -Dexec.mainClass="com.google.genai.examples.LocalCountTokens"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.LocalTokenizer;
+
+/** An example of using the Unified Gen AI Java SDK to count tokens locally. */
+public class LocalCountTokens {
+ public static void main(String[] args) {
+ LocalTokenizer tokenizer = new LocalTokenizer(Constants.GEMINI_MODEL_NAME);
+ System.out.println(
+ "Count for 'Hello world': " + tokenizer.countTokens("Hello world").totalTokens());
+ }
+}
diff --git a/examples/src/main/java/com/google/genai/examples/ModelManagement.java b/examples/src/main/java/com/google/genai/examples/ModelManagement.java
index f6652492d88..0c51973a64e 100644
--- a/examples/src/main/java/com/google/genai/examples/ModelManagement.java
+++ b/examples/src/main/java/com/google/genai/examples/ModelManagement.java
@@ -49,13 +49,13 @@
public final class ModelManagement {
public static void main(String[] args) {
- if (args.length == 0) {
- System.out.println("Please provide a model ID on the -Dexec.args argument.");
- return;
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
- String modelId = args[0];
-
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
// environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
diff --git a/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java b/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
index 58829301c54..a310cc28d68 100644
--- a/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/ModelManagementAsync.java
@@ -51,13 +51,13 @@
public final class ModelManagementAsync {
public static void main(String[] args) {
- if (args.length == 0) {
- System.out.println("Please provide a model ID on the -Dexec.args argument.");
- return;
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
- String modelId = args[0];
-
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
// key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
// environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
diff --git a/examples/src/main/java/com/google/genai/examples/RecontextImageAsync.java b/examples/src/main/java/com/google/genai/examples/RecontextImageAsync.java
new file mode 100644
index 00000000000..aab3a33b777
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/RecontextImageAsync.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.RecontextImageAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.ProductImage;
+import com.google.genai.types.RecontextImageConfig;
+import com.google.genai.types.RecontextImageResponse;
+import com.google.genai.types.RecontextImageSource;
+import java.util.ArrayList;
+import java.util.concurrent.CompletableFuture;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to recontextualize an image (product recontext)
+ * asynchronously.
+ */
+public final class RecontextImageAsync {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_RECONTEXT_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ Image productImageBackpack =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/backpack1.png").build();
+
+ RecontextImageConfig recontextImageConfig =
+ RecontextImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+ ArrayList<ProductImage> productImages = new ArrayList<>();
+ ProductImage productImage = ProductImage.builder().productImage(productImageBackpack).build();
+ productImages.add(productImage);
+
+ RecontextImageSource recontextImageSource =
+ RecontextImageSource.builder()
+ .prompt("On a school desk.")
+ .productImages(productImages)
+ .build();
+
+ CompletableFuture<RecontextImageResponse> recontextImageResponseFuture =
+ client.async.models.recontextImage(modelId, recontextImageSource, recontextImageConfig);
+
+ recontextImageResponseFuture
+ .thenAccept(
+ recontextImageResponse -> {
+ Image generatedImage =
+ recontextImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with generatedImage.
+ })
+ .join();
+ }
+
+ private RecontextImageAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/RecontextImageProductRecontext.java b/examples/src/main/java/com/google/genai/examples/RecontextImageProductRecontext.java
new file mode 100644
index 00000000000..5bddb418e12
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/RecontextImageProductRecontext.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.RecontextImageProductRecontext"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.ProductImage;
+import com.google.genai.types.RecontextImageConfig;
+import com.google.genai.types.RecontextImageResponse;
+import com.google.genai.types.RecontextImageSource;
+import java.util.ArrayList;
+
+/**
+ * An example of using the Unified Gen AI Java SDK to recontextualize an image (product recontext).
+ */
+public final class RecontextImageProductRecontext {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_RECONTEXT_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ Image productImageBackpack =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/backpack1.png").build();
+
+ RecontextImageConfig recontextImageConfig =
+ RecontextImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+ ArrayList<ProductImage> productImages = new ArrayList<>();
+ ProductImage productImage = ProductImage.builder().productImage(productImageBackpack).build();
+ productImages.add(productImage);
+
+ RecontextImageSource recontextImageSource =
+ RecontextImageSource.builder()
+ .prompt("On a school desk.")
+ .productImages(productImages)
+ .build();
+
+ RecontextImageResponse recontextImageResponse =
+ client.models.recontextImage(modelId, recontextImageSource, recontextImageConfig);
+
+ Image generatedImage = recontextImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with generatedImage.
+ }
+
+ private RecontextImageProductRecontext() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java b/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java
new file mode 100644
index 00000000000..28e12fb1ccc
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/RecontextImageVirtualTryOn.java
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.RecontextImageVirtualTryOn"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.ProductImage;
+import com.google.genai.types.RecontextImageConfig;
+import com.google.genai.types.RecontextImageResponse;
+import com.google.genai.types.RecontextImageSource;
+import java.util.ArrayList;
+
+/** An example of using the Unified Gen AI Java SDK to recontextualize an image (virtual try-on). */
+public final class RecontextImageVirtualTryOn {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.VIRTUAL_TRY_ON_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ Image productImagePants =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/pants.jpg").build();
+
+ Image personImage =
+ Image.builder().gcsUri("gs://genai-sdk-tests/inputs/images/man.jpg").build();
+
+ RecontextImageConfig recontextImageConfig =
+ RecontextImageConfig.builder().numberOfImages(1).outputMimeType("image/jpeg").build();
+
+ ArrayList<ProductImage> productImages = new ArrayList<>();
+ ProductImage productImage = ProductImage.builder().productImage(productImagePants).build();
+ productImages.add(productImage);
+
+ RecontextImageSource recontextImageSource =
+ RecontextImageSource.builder()
+ .personImage(personImage)
+ .productImages(productImages)
+ .build();
+
+ RecontextImageResponse recontextImageResponse =
+ client.models.recontextImage(modelId, recontextImageSource, recontextImageConfig);
+
+ Image generatedImage = recontextImageResponse.generatedImages().get().get(0).image().get();
+ // Do something with generatedImage.
+ }
+
+ private RecontextImageVirtualTryOn() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java b/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
index 5ba7dd1e712..d21ecee4850 100644
--- a/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
+++ b/examples/src/main/java/com/google/genai/examples/RequestLevelHttpOptions.java
@@ -47,13 +47,16 @@
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponse;
import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
/** An example of setting http options at request level. */
public final class RequestLevelHttpOptions {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -72,10 +75,13 @@ public static void main(String[] args) {
System.out.println("Using Gemini Developer API");
}
- // Set a customized header per request config.
+ // Set a customized header and retry options per request config.
GenerateContentConfig config =
GenerateContentConfig.builder()
- .httpOptions(HttpOptions.builder().headers(ImmutableMap.of("my-header", "my-value")))
+ .httpOptions(
+ HttpOptions.builder()
+ .headers(ImmutableMap.of("my-header", "my-value"))
+ .retryOptions(HttpRetryOptions.builder().attempts(3).httpStatusCodes(408, 429)))
.build();
GenerateContentResponse response =
diff --git a/examples/src/main/java/com/google/genai/examples/SegmentImage.java b/examples/src/main/java/com/google/genai/examples/SegmentImage.java
new file mode 100644
index 00000000000..9adcf6f93a3
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/SegmentImage.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.SegmentImage"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.SegmentImageConfig;
+import com.google.genai.types.SegmentImageResponse;
+import com.google.genai.types.SegmentImageSource;
+import com.google.genai.types.SegmentMode;
+
+/** An example of using the Unified Gen AI Java SDK to segment an image. */
+public final class SegmentImage {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.SEGMENT_IMAGE_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // Base image created using generateImages with prompt:
+ // "A square, circle, and triangle with a white background"
+ Image image = Image.fromFile("./resources/shapes.jpg");
+
+ // Control reference.
+ SegmentImageConfig segmentImageConfig =
+ SegmentImageConfig.builder().mode(SegmentMode.Known.FOREGROUND).build();
+
+ SegmentImageResponse segmentImageResponse =
+ client.models.segmentImage(
+ modelId, SegmentImageSource.builder().image(image).build(), segmentImageConfig);
+
+ Image maskImage = segmentImageResponse.generatedMasks().get().get(0).mask().get();
+ // Do something with maskImage.
+ }
+
+ private SegmentImage() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java b/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java
new file mode 100644
index 00000000000..32ffead7e76
--- /dev/null
+++ b/examples/src/main/java/com/google/genai/examples/SegmentImageAsync.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Usage:
+ *
+ * <p>1a. If you are using Vertex AI, setup ADC to get credentials:
+ * https://cloud.google.com/docs/authentication/provide-credentials-adc#google-idp
+ *
+ * <p>Then set Project, Location, and USE_VERTEXAI flag as environment variables:
+ *
+ * <p>1b. If you are using Gemini Developer API, set an API key environment variable. You can find a
+ * list of available API keys here: https://aistudio.google.com/app/apikey
+ *
+ * <p>export GOOGLE_API_KEY=YOUR_API_KEY
+ *
+ * <p>2. Compile the java package and run the sample code.
+ *
+ * <p>mvn clean compile
+ *
+ * <p>mvn exec:java -Dexec.mainClass="com.google.genai.examples.SegmentImageAsync"
+ * -Dexec.args="YOUR_MODEL_ID"
+ */
+package com.google.genai.examples;
+
+import com.google.genai.Client;
+import com.google.genai.types.Image;
+import com.google.genai.types.SegmentImageConfig;
+import com.google.genai.types.SegmentImageResponse;
+import com.google.genai.types.SegmentImageSource;
+import com.google.genai.types.SegmentMode;
+import java.util.concurrent.CompletableFuture;
+
+/** An example of using the Unified Gen AI Java SDK to segment an image asynchronously. */
+public final class SegmentImageAsync {
+ public static void main(String[] args) {
+ final String modelId;
+ if (args.length != 0) {
+ modelId = args[0];
+ } else {
+ modelId = Constants.SEGMENT_IMAGE_MODEL_NAME;
+ }
+
+ // Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
+ // key from the environment variable `GOOGLE_API_KEY`. Vertex AI API can be used by setting the
+ // environment variables `GOOGLE_CLOUD_LOCATION` and `GOOGLE_CLOUD_PROJECT`, as well as setting
+ // `GOOGLE_GENAI_USE_VERTEXAI` to "true".
+ //
+ // Note: Some services are only available in a specific API backend (Gemini or Vertex), you will
+ // get a `UnsupportedOperationException` if you try to use a service that is not available in
+ // the backend you are using.
+ Client client = new Client();
+
+ if (client.vertexAI()) {
+ System.out.println("Using Vertex AI");
+ } else {
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
+ }
+
+ // Base image created using generateImages with prompt:
+ // "A square, circle, and triangle with a white background"
+ Image image = Image.fromFile("./resources/shapes.jpg");
+
+ // Control reference.
+ SegmentImageConfig segmentImageConfig =
+ SegmentImageConfig.builder().mode(SegmentMode.Known.FOREGROUND).build();
+
+ CompletableFuture<SegmentImageResponse> segmentImageResponseFuture =
+ client.async.models.segmentImage(
+ modelId, SegmentImageSource.builder().image(image).build(), segmentImageConfig);
+
+ segmentImageResponseFuture
+ .thenAccept(
+ segmentImageResponse -> {
+ Image maskImage = segmentImageResponse.generatedMasks().get().get(0).mask().get();
+ // Do something with maskImage.
+ })
+ .join();
+ }
+
+ private SegmentImageAsync() {}
+}
diff --git a/examples/src/main/java/com/google/genai/examples/TuningJobs.java b/examples/src/main/java/com/google/genai/examples/TuningJobs.java
index 67765fcdde0..05f323a6164 100644
--- a/examples/src/main/java/com/google/genai/examples/TuningJobs.java
+++ b/examples/src/main/java/com/google/genai/examples/TuningJobs.java
@@ -51,9 +51,11 @@
public final class TuningJobs {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java b/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java
index b76235894fb..442a062ba2a 100644
--- a/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/TuningJobsAsync.java
@@ -56,9 +56,11 @@
public final class TuningJobsAsync {
public static void main(String[] args) {
- String modelId = "gemini-2.0-flash-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.GEMINI_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
diff --git a/examples/src/main/java/com/google/genai/examples/UpscaleImage.java b/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
index ef726d4c105..a7fa7393561 100644
--- a/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
+++ b/examples/src/main/java/com/google/genai/examples/UpscaleImage.java
@@ -48,9 +48,11 @@
/** An example of using the Unified Gen AI Java SDK to upscale an image. */
public final class UpscaleImage {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -66,7 +68,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java b/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
index 0f7e136bf3b..df3e099c881 100644
--- a/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
+++ b/examples/src/main/java/com/google/genai/examples/UpscaleImageAsync.java
@@ -49,9 +49,11 @@
/** An example of using the Unified Gen AI Java SDK to upscale an image asynchronously. */
public final class UpscaleImageAsync {
public static void main(String[] args) {
- String modelId = "imagen-3.0-generate-001";
+ final String modelId;
if (args.length != 0) {
modelId = args[0];
+ } else {
+ modelId = Constants.IMAGEN_CAPABILITY_MODEL_NAME;
}
// Instantiate the client. The client by default uses the Gemini Developer API. It gets the API
@@ -67,7 +69,8 @@ public static void main(String[] args) {
if (client.vertexAI()) {
System.out.println("Using Vertex AI");
} else {
- System.out.println("Using Gemini Developer API");
+ System.out.println("Gemini Developer API is not supported for this example.");
+ System.exit(0);
}
// Base image created using generateImages with prompt:
diff --git a/pom.xml b/pom.xml
index caea5b8c446..b71713b2c2f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
com.google.genaigoogle-genaigoogle-genai
- 1.11.0-SNAPSHOT
+ 1.26.0-SNAPSHOTjar
Java idiomatic SDK for the Gemini Developer APIs and Vertex AI APIs.
@@ -47,6 +47,7 @@
1.81.81.33.0
+ 3.25.12.47.04.5.141.11.0
@@ -90,11 +91,6 @@
httpclient${apache.httpcomponents.httpclient.version}
-
- com.google.auto.value
- auto-value
- ${auto-value.version}
- com.google.auto.valueauto-value-annotations
@@ -130,6 +126,11 @@
okhttp${okhttp.version}
+
+ com.google.protobuf
+ protobuf-java
+ ${protobuf.version}
+ org.junit.jupiter
@@ -155,12 +156,6 @@
jspecify1.0.0
-
- com.github.tomakehurst
- wiremock-jre8
- 2.35.0
- test
- org.jetbrains.kotlin
@@ -170,6 +165,13 @@
+
+
+ kr.motd.maven
+ os-maven-plugin
+ 1.7.1
+
+
@@ -193,6 +195,9 @@
${auto-value.version}
+
+ -parameters
+
@@ -200,20 +205,27 @@
3.5.2
-
- me.fabriciorby
- maven-surefire-junit5-tree-reporter
- 0.1.0
-
+
+ me.fabriciorby
+ maven-surefire-junit5-tree-reporter
+ 0.1.0
+
- plain
-
+ plain
+ maven-jar-plugin
- 3.0.2
+ 3.3.0
+
+
+
+ com.google.genai
+
+
+ maven-install-plugin
@@ -259,16 +271,6 @@
-
- org.apache.maven.plugins
- maven-compiler-plugin
- 3.14.0
-
-
- -parameters
-
-
- org.apache.maven.pluginsmaven-javadoc-plugin
@@ -313,6 +315,7 @@
testreport
+ check
@@ -321,6 +324,85 @@
com/google/genai/types/AutoValue_*.class
+
+
+ PACKAGE
+
+ com.google.genai
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+
+ 0.70
+
+
+
+
+ PACKAGE
+
+ com.google.genai.errors
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+ 0.95
+
+
+
+
+ PACKAGE
+
+ com.google.genai.types
+
+
+
+ INSTRUCTION
+ COVEREDRATIO
+
+ 0.40
+
+
+
+
+
+
+
+
+
+ org.xolstice.maven.plugins
+ protobuf-maven-plugin
+ 0.6.1
+
+ com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier}
+ ${project.basedir}/src/main/proto
+
+
+
+
+ compile
+ test-compile
+
+
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 3.5.0
+
+
+ add-source
+ generate-sources
+
+ add-source
+
+
+
+ ${project.build.directory}/generated-sources/protobuf/java
+
diff --git a/run_shared_tests.sh b/run_shared_tests.sh
new file mode 100755
index 00000000000..d84285b67e7
--- /dev/null
+++ b/run_shared_tests.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+export GOOGLE_GENAI_CLIENT_MODE=api
+export GOOGLE_GENAI_TESTS_SUBDIR=shared
+REPLAYS_DIR="$(blaze info workspace 2>/dev/null)/google/cloud/aiplatform/sdk/genai/replays"
+export GOOGLE_GENAI_REPLAYS_DIRECTORY="$REPLAYS_DIR"
+
+echo "Replays directory: $GOOGLE_GENAI_REPLAYS_DIRECTORY"
+echo "Client mode: $GOOGLE_GENAI_CLIENT_MODE"
+echo "Tests subdirectory: $GOOGLE_GENAI_TESTS_SUBDIR"
+echo "Running shared table tests in API mode..."
+mvn clean test -Dtest=TableTest -Djacoco.skip=true && mvn clean
diff --git a/src/main/java/com/google/genai/ApiClient.java b/src/main/java/com/google/genai/ApiClient.java
index 48d5f3c7878..ecd5414c559 100644
--- a/src/main/java/com/google/genai/ApiClient.java
+++ b/src/main/java/com/google/genai/ApiClient.java
@@ -19,18 +19,22 @@
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.auth.oauth2.GoogleCredentials;
import com.google.common.base.Ascii;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
-import com.google.errorprone.annotations.CanIgnoreReturnValue;
import com.google.genai.errors.GenAiIOException;
import com.google.genai.types.ClientOptions;
import com.google.genai.types.HttpOptions;
+import com.google.genai.types.HttpRetryOptions;
import java.io.IOException;
import java.time.Duration;
+import java.util.List;
import java.util.Map;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import java.util.logging.Logger;
import java.util.stream.Stream;
import okhttp3.Dispatcher;
@@ -40,12 +44,11 @@
import okhttp3.RequestBody;
import org.jspecify.annotations.Nullable;
-
/** Interface for an API client which issues HTTP requests to the GenAI APIs. */
abstract class ApiClient {
// {x-version-update-start:google-genai:released}
- private static final String SDK_VERSION = "1.10.0";
+ private static final String SDK_VERSION = "1.25.0";
// {x-version-update-end:google-genai:released}
private static final Logger logger = Logger.getLogger(ApiClient.class.getName());
@@ -99,7 +102,7 @@ protected ApiClient(
this.httpOptions = mergeHttpOptions(customHttpOptions.get());
}
- this.httpClient = createHttpClient(httpOptions.timeout(), clientOptions);
+ this.httpClient = createHttpClient(httpOptions, clientOptions);
}
ApiClient(
@@ -187,14 +190,18 @@ protected ApiClient(
apiKeyValue = null;
}
+ if (locationValue == null && apiKeyValue == null) {
+ locationValue = "global";
+ }
+
this.apiKey = Optional.ofNullable(apiKeyValue);
this.project = Optional.ofNullable(projectValue);
this.location = Optional.ofNullable(locationValue);
// Validate that either project and location or API key is set.
- if (!((this.project.isPresent() && this.location.isPresent()) || this.apiKey.isPresent())) {
+ if (!(this.project.isPresent() || this.apiKey.isPresent())) {
throw new IllegalArgumentException(
- "For Vertex AI APIs, either project/location or API key must be set.");
+ "For Vertex AI APIs, either project or API key must be set.");
}
// Only set credentials if using project/location.
@@ -211,18 +218,24 @@ protected ApiClient(
this.httpOptions = mergeHttpOptions(customHttpOptions.get());
}
this.vertexAI = true;
- this.httpClient = createHttpClient(httpOptions.timeout(), clientOptions);
+ this.httpClient = createHttpClient(httpOptions, clientOptions);
}
private OkHttpClient createHttpClient(
- Optional timeout, Optional clientOptions) {
+ HttpOptions httpOptions, Optional clientOptions) {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
// Remove timeouts by default (OkHttp has a default of 10 seconds)
builder.connectTimeout(Duration.ofMillis(0));
builder.readTimeout(Duration.ofMillis(0));
builder.writeTimeout(Duration.ofMillis(0));
- timeout.ifPresent(connectTimeout -> builder.connectTimeout(Duration.ofMillis(connectTimeout)));
+ httpOptions
+ .timeout()
+ .ifPresent(connectTimeout -> builder.connectTimeout(Duration.ofMillis(connectTimeout)));
+
+ HttpRetryOptions retryOptions =
+ httpOptions.retryOptions().orElse(HttpRetryOptions.builder().build());
+ builder.addInterceptor(new RetryInterceptor(retryOptions));
clientOptions.ifPresent(
options -> {
@@ -236,6 +249,7 @@ private OkHttpClient createHttpClient(
}
/** Builds a HTTP request given the http method, path, and request json string. */
+ @SuppressWarnings("unchecked")
protected Request buildRequest(
String httpMethod,
String path,
@@ -280,15 +294,41 @@ protected Request buildRequest(
throw new IllegalArgumentException("Unsupported HTTP method: " + capitalizedHttpMethod);
}
+ ObjectMapper objectMapper = new ObjectMapper();
RequestBody body;
if (METHODS_WITH_BODY.contains(capitalizedHttpMethod)) {
body = RequestBody.create(requestJson, MediaType.parse("application/json"));
} else {
body = null;
}
+
+ if (mergedHttpOptions.extraBody().isPresent() && body != null) {
+ try {
+ Map requestBodyMap = objectMapper.readValue(requestJson, Map.class);
+ mergeMaps(requestBodyMap, mergedHttpOptions.extraBody().get());
+ requestJson = objectMapper.writeValueAsString(requestBodyMap);
+ body = RequestBody.create(requestJson, MediaType.parse("application/json"));
+ } catch (JsonProcessingException e) {
+ logger.warning("Failed to merge extraBody into request body: " + e.getMessage());
+ // If merging fails, proceed with the original request body
+ body = RequestBody.create(requestJson, MediaType.parse("application/json"));
+ }
+ } else if (mergedHttpOptions.extraBody().isPresent()) {
+ logger.warning(
+ "HttpOptions.extraBody is set, but the HTTP method does not support a request body. "
+ + "The extraBody will be ignored.");
+ }
+
Request.Builder requestBuilder =
new Request.Builder().url(requestUrl).method(capitalizedHttpMethod, body);
+ requestHttpOptions.ifPresent(
+ httpOptions -> {
+ if (httpOptions.retryOptions().isPresent()) {
+ requestBuilder.tag(HttpRetryOptions.class, mergedHttpOptions.retryOptions().get());
+ }
+ });
+
setHeaders(requestBuilder, mergedHttpOptions);
return requestBuilder.build();
}
@@ -304,6 +344,12 @@ protected Request buildRequest(
RequestBody body =
RequestBody.create(requestBytes, MediaType.get("application/octet-stream"));
Request.Builder requestBuilder = new Request.Builder().url(url).post(body);
+ requestHttpOptions.ifPresent(
+ httpOptions -> {
+ if (httpOptions.retryOptions().isPresent()) {
+ requestBuilder.tag(HttpRetryOptions.class, mergedHttpOptions.retryOptions().get());
+ }
+ });
setHeaders(requestBuilder, mergedHttpOptions);
return requestBuilder.build();
} else {
@@ -341,7 +387,6 @@ private void setHeaders(Request.Builder request, HttpOptions requestHttpOptions)
}
/** Sends a Http request given the http method, path, and request json string. */
- @CanIgnoreReturnValue
public abstract ApiResponse request(
String httpMethod, String path, String requestJson, Optional httpOptions);
@@ -349,6 +394,20 @@ public abstract ApiResponse request(
public abstract ApiResponse request(
String httpMethod, String path, byte[] requestBytes, Optional httpOptions);
+ /**
+ * Sends an asynchronous Http request given the http method, path, request json string, and http
+ * options.
+ */
+ public abstract CompletableFuture asyncRequest(
+ String httpMethod, String path, String requestJson, Optional httpOptions);
+
+ /**
+ * Sends an asynchronous Http request given the http method, path, request bytes, and http
+ * options.
+ */
+ public abstract CompletableFuture asyncRequest(
+ String httpMethod, String path, byte[] requestBytes, Optional httpOptions);
+
/** Returns the library version. */
static String libraryVersion() {
// TODO: Automate revisions to the SDK library version.
@@ -382,6 +441,47 @@ OkHttpClient httpClient() {
return httpClient;
}
+ /**
+ * Merges two maps recursively. If a key exists in both maps, the value from `source` overwrites
+ * the value in `target`. If the value is a list, then update the whole list. A warning is logged
+ * if the types of the values for the same key are different.
+ *
+ * @param target The target map to merge into.
+ * @param source The source map to merge from.
+ */
+ @SuppressWarnings("unchecked")
+ private void mergeMaps(Map target, Map source) {
+ for (Map.Entry entry : source.entrySet()) {
+ String key = entry.getKey();
+ Object sourceValue = entry.getValue();
+
+ if (target.containsKey(key)) {
+ Object targetValue = target.get(key);
+
+ if (targetValue instanceof Map && sourceValue instanceof Map) {
+ // Both values are maps, recursively merge them
+ mergeMaps((Map) targetValue, (Map) sourceValue);
+ } else if (targetValue instanceof List && sourceValue instanceof List) {
+ // Both values are lists, replace the target list with the source list
+ target.put(key, sourceValue);
+ } else {
+ // Values are not both maps or both lists, check if they have the same type
+ if (targetValue.getClass() != sourceValue.getClass()) {
+ logger.warning(
+ String.format(
+ "Type mismatch for key '%s'. Original type: %s, new type: %s. Overwriting"
+ + " with the new value.",
+ key, targetValue.getClass().getName(), sourceValue.getClass().getName()));
+ }
+ target.put(key, sourceValue);
+ }
+ } else {
+ // Key does not exist in target, add it
+ target.put(key, sourceValue);
+ }
+ }
+ }
+
private Optional