Update ADK doc according to issue #744 - 3 #751
@@ -122,70 +122,42 @@ For deployed applications, a service account is the standard method.
**Example:**

=== "Python"

    ```python
    from google.adk.agents import LlmAgent

    # --- Example using a stable Gemini Flash model ---
    agent_gemini_flash = LlmAgent(
        # Use the latest stable Flash model identifier
        model="gemini-2.0-flash",
        name="gemini_flash_agent",
        instruction="You are a fast and helpful Gemini assistant.",
        # ... other agent parameters
    )

    # --- Example using a powerful Gemini Pro model ---
    # Note: Always check the official Gemini documentation for the latest model names,
    # including specific preview versions if needed. Preview models might have
    # different availability or quota limitations.
    agent_gemini_pro = LlmAgent(
        # Use the latest Pro model identifier (a preview version is shown here)
        model="gemini-2.5-pro-preview-03-25",
        name="gemini_pro_agent",
        instruction="You are a powerful and knowledgeable Gemini assistant.",
        # ... other agent parameters
    )
    ```
=== "Java"

    ```java
    // --- Example #1: using a stable Gemini Flash model with ENV variables ---
    LlmAgent agentGeminiFlash =
        LlmAgent.builder()
            // Use the latest stable Flash model identifier
            .model("gemini-2.0-flash") // Set ENV variables to use this model
            .name("gemini_flash_agent")
            .instruction("You are a fast and helpful Gemini assistant.")
            // ... other agent parameters
            .build();

    // --- Example #2: using a powerful Gemini Pro model with API Key in model ---
    LlmAgent agentGeminiPro =
        LlmAgent.builder()
            // Use the latest Pro model identifier (a preview version is shown here)
            .model(new Gemini("gemini-2.5-pro-preview-03-25",
                Client.builder()
                    .vertexAI(false)
                    .apiKey("API_KEY") // Set the API key (or project/location)
                    .build()))
            // Or, you can also directly pass the API_KEY
            // .model(new Gemini("gemini-2.5-pro-preview-03-25", "API_KEY"))
            .name("gemini_pro_agent")
            .instruction("You are a powerful and knowledgeable Gemini assistant.")
            // ... other agent parameters
            .build();

    // Note: Always check the official Gemini documentation for the latest model names,
    // including specific preview versions if needed. Preview models might have
    // different availability or quota limitations.
    ```
!!!warning "Secure Your Credentials"
    Service account credentials or API keys are powerful credentials. Never expose them publicly. Use a secret manager like [Google Secret Manager](https://cloud.google.com/secret-manager) to store and access them securely in production.
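For instance, a minimal sketch of loading an API key from Secret Manager at startup might look like the following. This is not ADK-specific; it assumes the `google-cloud-secret-manager` package is installed and that a secret already holds your key, and the project and secret names below are placeholders.

```python
from google.cloud import secretmanager

def load_api_key(project_id: str, secret_id: str) -> str:
    """Fetch the latest version of a secret and return it as text."""
    client = secretmanager.SecretManagerServiceClient()
    name = f"projects/{project_id}/secrets/{secret_id}/versions/latest"
    response = client.access_secret_version(name=name)
    return response.payload.data.decode("utf-8")

# Placeholder names: replace with your own project and secret.
api_key = load_api_key("your-gcp-project-id", "my-model-api-key")
```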
## Using Gemma Models

[Gemma](https://ai.google.dev/gemma/docs) is a family of lightweight, state-of-the-art open models from Google. The ADK provides a dedicated wrapper to integrate Gemma models into your agents.

**Integration Method:** Instantiate the `Gemma` wrapper class and pass it to the `model` parameter of your `LlmAgent`.

**Prerequisites:**

* **Authentication:** Follow the same authentication setup as for [Google AI Studio](#google-ai-studio), using an API key.

**Key Considerations:**

* **No System Instructions:** Gemma models do not support system instructions. Any system-level prompts will be converted to user-level prompts by the ADK.
* **Limited Function Calling:** Gemma's native function-calling support is not as extensive as Gemini's. The ADK attempts to parse function calls from the model's text output, but this may be less reliable (see the tool-use sketch after the example below).
* **No Vertex AI Support:** The current integration is for the Gemini API only and does not support Gemma models hosted on Vertex AI.
**Example:**

```python
from google.adk.agents import LlmAgent
from google.adk.models import Gemma

# --- Example Agent using the Gemma 3 27B IT model ---
agent_gemma = LlmAgent(
    model=Gemma(model="gemma-3-27b-it"),
    name="gemma_agent",
    instruction="You are a helpful assistant powered by Gemma.",
    # ... other agent parameters
)
```
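Because function calls are parsed from Gemma's text output, it is worth verifying tool use with your own prompts. A minimal sketch, assuming the same `Gemma` wrapper as above and a hypothetical `get_current_time` function used as a tool:

```python
from google.adk.agents import LlmAgent
from google.adk.models import Gemma

def get_current_time(city: str) -> dict:
    """Hypothetical tool: returns a canned time for the given city."""
    return {"city": city, "time": "09:00"}

# --- Sketch: Gemma agent with a single function tool ---
# Function calls are parsed from Gemma's text output, so keep tools simple
# and test the behaviour before relying on it.
agent_gemma_tools = LlmAgent(
    model=Gemma(model="gemma-3-27b-it"),
    name="gemma_tool_agent",
    instruction="You are a helpful assistant. Use the tool when asked about the time.",
    tools=[get_current_time],
)
```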
## Using Anthropic models

This feature is currently available for Java. Python support for the direct Anthropic API (non-Vertex) is via LiteLLM.
@@ -208,46 +180,6 @@ Instantiate `com.google.adk.models.Claude`, providing the desired Claude model name.
**Example:**

```java
import com.anthropic.client.AnthropicClient;
import com.google.adk.agents.LlmAgent;
import com.google.adk.models.Claude;
import com.anthropic.client.okhttp.AnthropicOkHttpClient; // From Anthropic's SDK

public class DirectAnthropicAgent {

  private static final String CLAUDE_MODEL_ID = "claude-3-7-sonnet-latest"; // Or your preferred Claude model

  public static LlmAgent createAgent() {

    // It's recommended to load sensitive keys from a secure config
    AnthropicClient anthropicClient = AnthropicOkHttpClient.builder()
        .apiKey("ANTHROPIC_API_KEY")
        .build();

    Claude claudeModel = new Claude(
        CLAUDE_MODEL_ID,
        anthropicClient
    );

    return LlmAgent.builder()
        .name("claude_direct_agent")
        .model(claudeModel)
        .instruction("You are a helpful AI assistant powered by Anthropic Claude.")
        // ... other LlmAgent configurations
        .build();
  }

  public static void main(String[] args) {
    try {
      LlmAgent agent = createAgent();
      System.out.println("Successfully created direct Anthropic agent: " + agent.name());
    } catch (IllegalStateException e) {
      System.err.println("Error creating agent: " + e.getMessage());
    }
  }
}
```
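For Python, as the note at the top of this section says, the direct (non-Vertex) Anthropic API is reached through LiteLLM rather than a dedicated wrapper. A minimal sketch, assuming the ADK's `LiteLlm` wrapper and an `ANTHROPIC_API_KEY` set in the environment (verify the model string against Anthropic's current documentation):

```python
from google.adk.agents import LlmAgent
from google.adk.models.lite_llm import LiteLlm

# --- Sketch: Claude over the direct Anthropic API via LiteLLM ---
# Assumes ANTHROPIC_API_KEY is exported in the environment; LiteLLM reads it from there.
agent_claude_direct = LlmAgent(
    model=LiteLlm(model="anthropic/claude-3-7-sonnet-latest"),
    name="claude_direct_agent",
    instruction="You are a helpful AI assistant powered by Anthropic Claude.",
    # ... other agent parameters
)
```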
BREAKING CHANGE: Removal of existing content
@@ -649,134 +581,5 @@ agent_finetuned_gemini = LlmAgent(
Some providers, like Anthropic, make their models available directly through
Vertex AI.

=== "Python"

    **Integration Method:** Uses the direct model string (e.g.,
    `"claude-3-sonnet@20240229"`), *but requires manual registration* within ADK.

    **Why Registration?** ADK's registry automatically recognizes `gemini-*` strings
    and standard Vertex AI endpoint strings (`projects/.../endpoints/...`) and
    routes them via the `google-genai` library. For other model types used directly
    via Vertex AI (like Claude), you must explicitly tell the ADK registry which
    specific wrapper class (`Claude` in this case) knows how to handle that model
    identifier string with the Vertex AI backend.

    **Setup:**

    1. **Vertex AI Environment:** Ensure the consolidated Vertex AI setup (ADC, Env
       Vars, `GOOGLE_GENAI_USE_VERTEXAI=TRUE`) is complete.

    2. **Install Provider Library:** Install the necessary client library configured
       for Vertex AI.

        ```shell
        pip install "anthropic[vertex]"
        ```

    3. **Register Model Class:** Add this code near the start of your application,
       *before* creating an agent using the Claude model string:

        ```python
        # Required for using Claude model strings directly via Vertex AI with LlmAgent
        from google.adk.models.anthropic_llm import Claude
        from google.adk.models.registry import LLMRegistry

        LLMRegistry.register(Claude)
        ```

    **Example:**
    ```python
    from google.adk.agents import LlmAgent
    from google.adk.models.anthropic_llm import Claude # Import needed for registration
    from google.adk.models.registry import LLMRegistry # Import needed for registration
    from google.genai import types

    # --- Register Claude class (do this once at startup) ---
    LLMRegistry.register(Claude)

    # --- Example Agent using Claude 3 Sonnet on Vertex AI ---

    # Standard model name for Claude 3 Sonnet on Vertex AI
    claude_model_vertexai = "claude-3-sonnet@20240229"

    agent_claude_vertexai = LlmAgent(
        model=claude_model_vertexai, # Pass the direct string after registration
        name="claude_vertexai_agent",
        instruction="You are an assistant powered by Claude 3 Sonnet on Vertex AI.",
        generate_content_config=types.GenerateContentConfig(max_output_tokens=4096),
        # ... other agent parameters
    )
    ```
=== "Java" | ||
|
||
**Integration Method:** Directly instantiate the provider-specific model class (e.g., `com.google.adk.models.Claude`) and configure it with a Vertex AI backend. | ||
|
||
**Why Direct Instantiation?** The Java ADK's `LlmRegistry` primarily handles Gemini models by default. For third-party models like Claude on Vertex AI, you directly provide an instance of the ADK's wrapper class (e.g., `Claude`) to the `LlmAgent`. This wrapper class is responsible for interacting with the model via its specific client library, configured for Vertex AI. | ||
|
||
**Setup:** | ||
|
||
1. **Vertex AI Environment:** | ||
* Ensure your Google Cloud project and region are correctly set up. | ||
* **Application Default Credentials (ADC):** Make sure ADC is configured correctly in your environment. This is typically done by running `gcloud auth application-default login`. The Java client libraries will use these credentials to authenticate with Vertex AI. Follow the [Google Cloud Java documentation on ADC](https://cloud.google.com/java/docs/reference/google-auth-library/latest/com.google.auth.oauth2.GoogleCredentials#com_google_auth_oauth2_GoogleCredentials_getApplicationDefault__) for detailed setup. | ||
|
||
2. **Provider Library Dependencies:** | ||
* **Third-Party Client Libraries (Often Transitive):** The ADK core library often includes the necessary client libraries for common third-party models on Vertex AI (like Anthropic's required classes) as **transitive dependencies**. This means you might not need to explicitly add a separate dependency for the Anthropic Vertex SDK in your `pom.xml` or `build.gradle`. | ||
|
||
    3. **Instantiate and Configure the Model:**
       When creating your `LlmAgent`, instantiate the `Claude` class (or the equivalent for another provider) and configure its `VertexBackend`.

    **Example:**

    ```java
    import com.anthropic.client.AnthropicClient;
    import com.anthropic.client.okhttp.AnthropicOkHttpClient;
    import com.anthropic.vertex.backends.VertexBackend;
    import com.google.adk.agents.LlmAgent;
    import com.google.adk.models.Claude; // ADK's wrapper for Claude
    import com.google.auth.oauth2.GoogleCredentials;
    import java.io.IOException;

    // ... other imports

    public class ClaudeVertexAiAgent {

      public static LlmAgent createAgent() throws IOException {
        // Model name for Claude on Vertex AI (here Claude 3.7 Sonnet; use another Claude model if needed)
        String claudeModelVertexAi = "claude-3-7-sonnet";

        // Configure the AnthropicOkHttpClient with the VertexBackend
        AnthropicClient anthropicClient = AnthropicOkHttpClient.builder()
            .backend(
                VertexBackend.builder()
                    .region("us-east5") // Specify your Vertex AI region
                    .project("your-gcp-project-id") // Specify your GCP Project ID
                    .googleCredentials(GoogleCredentials.getApplicationDefault())
                    .build())
            .build();

        // Instantiate LlmAgent with the ADK Claude wrapper
        LlmAgent agentClaudeVertexAi = LlmAgent.builder()
            .model(new Claude(claudeModelVertexAi, anthropicClient)) // Pass the Claude instance
            .name("claude_vertexai_agent")
            .instruction("You are an assistant powered by Claude 3 Sonnet on Vertex AI.")
            // .generateContentConfig(...) // Optional: Add generation config if needed
            // ... other agent parameters
            .build();

        return agentClaudeVertexAi;
      }

      public static void main(String[] args) {
        try {
          LlmAgent agent = createAgent();
          System.out.println("Successfully created agent: " + agent.name());
          // Here you would typically set up a Runner and Session to interact with the agent
        } catch (IOException e) {
          System.err.println("Failed to create agent: " + e.getMessage());
          e.printStackTrace();
        }
      }
    }
    ```