diff --git a/reference/python/uv.lock b/reference/python/uv.lock
index 217ab9827d..2e27aea176 100644
--- a/reference/python/uv.lock
+++ b/reference/python/uv.lock
@@ -851,7 +851,7 @@ wheels = [
[[package]]
name = "deepagents"
version = "0.2.5"
-source = { git = "https://github.com/langchain-ai/deepagents.git#9ed6483e1a64617719ba0a90eba0018ae72dabed" }
+source = { git = "https://github.com/langchain-ai/deepagents.git#766c41cf7eb9c413f405aa6f493d87ae137630be" }
dependencies = [
{ name = "langchain" },
{ name = "langchain-anthropic" },
@@ -1998,7 +1998,7 @@ wheels = [
[[package]]
name = "langchain"
version = "1.0.5"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Flangchain_v1#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Flangchain_v1#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph" },
@@ -2008,7 +2008,7 @@ dependencies = [
[[package]]
name = "langchain-anthropic"
version = "1.0.2"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fanthropic#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fanthropic#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "anthropic" },
{ name = "langchain-core" },
@@ -2028,7 +2028,7 @@ dependencies = [
[[package]]
name = "langchain-aws"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain-aws.git?subdirectory=libs%2Faws#ce8208a12a0adb64c9cbf1ea8c179ad461db225a" }
+source = { git = "https://github.com/langchain-ai/langchain-aws.git?subdirectory=libs%2Faws#357bba09769aeb3597c18bd48ee5c40c7322d999" }
dependencies = [
{ name = "boto3" },
{ name = "langchain-core" },
@@ -2068,7 +2068,7 @@ dependencies = [
[[package]]
name = "langchain-chroma"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fchroma#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fchroma#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "chromadb" },
{ name = "langchain-core" },
@@ -2078,7 +2078,7 @@ dependencies = [
[[package]]
name = "langchain-classic"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Flangchain#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Flangchain#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "langchain-text-splitters" },
@@ -2104,7 +2104,7 @@ dependencies = [
[[package]]
name = "langchain-community"
version = "0.4.1"
-source = { git = "https://github.com/langchain-ai/langchain-community.git?subdirectory=libs%2Fcommunity#47842a5a15902448d20a4052dca9cfe42d61aafa" }
+source = { git = "https://github.com/langchain-ai/langchain-community.git?subdirectory=libs%2Fcommunity#c3a9294d4b9e0e0cb6716e5f4b6198bd4853a08b" }
dependencies = [
{ name = "aiohttp" },
{ name = "httpx-sse" },
@@ -2122,7 +2122,7 @@ dependencies = [
[[package]]
name = "langchain-core"
version = "1.0.4"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fcore#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fcore#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "jsonpatch" },
{ name = "langsmith" },
@@ -2146,7 +2146,7 @@ dependencies = [
[[package]]
name = "langchain-deepseek"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fdeepseek#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fdeepseek#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "langchain-openai" },
@@ -2155,7 +2155,7 @@ dependencies = [
[[package]]
name = "langchain-exa"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fexa#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fexa#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "exa-py" },
{ name = "langchain-core" },
@@ -2164,7 +2164,7 @@ dependencies = [
[[package]]
name = "langchain-fireworks"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Ffireworks#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Ffireworks#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "aiohttp" },
{ name = "fireworks-ai" },
@@ -2176,7 +2176,7 @@ dependencies = [
[[package]]
name = "langchain-google-community"
version = "3.0.0"
-source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fcommunity#d26f946cd74a1e0d24b426ed5eb69bbf925f618e" }
+source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fcommunity#1aeba0a097890f2937371c701d1cf756c3538466" }
dependencies = [
{ name = "google-api-core" },
{ name = "google-api-python-client" },
@@ -2189,8 +2189,8 @@ dependencies = [
[[package]]
name = "langchain-google-genai"
-version = "3.0.1"
-source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fgenai#d26f946cd74a1e0d24b426ed5eb69bbf925f618e" }
+version = "3.0.2"
+source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fgenai#1aeba0a097890f2937371c701d1cf756c3538466" }
dependencies = [
{ name = "filetype" },
{ name = "google-ai-generativelanguage" },
@@ -2200,8 +2200,8 @@ dependencies = [
[[package]]
name = "langchain-google-vertexai"
-version = "3.0.2"
-source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fvertexai#d26f946cd74a1e0d24b426ed5eb69bbf925f618e" }
+version = "3.0.3"
+source = { git = "https://github.com/langchain-ai/langchain-google.git?subdirectory=libs%2Fvertexai#1aeba0a097890f2937371c701d1cf756c3538466" }
dependencies = [
{ name = "bottleneck" },
{ name = "google-cloud-aiplatform" },
@@ -2218,7 +2218,7 @@ dependencies = [
[[package]]
name = "langchain-groq"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fgroq#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fgroq#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "groq" },
{ name = "langchain-core" },
@@ -2227,7 +2227,7 @@ dependencies = [
[[package]]
name = "langchain-huggingface"
version = "1.0.1"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fhuggingface#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fhuggingface#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "huggingface-hub" },
{ name = "langchain-core" },
@@ -2265,7 +2265,7 @@ dependencies = [
[[package]]
name = "langchain-mistralai"
version = "1.0.1"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fmistralai#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fmistralai#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "httpx" },
{ name = "httpx-sse" },
@@ -2276,8 +2276,8 @@ dependencies = [
[[package]]
name = "langchain-model-profiles"
-version = "0.0.3"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fmodel-profiles#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+version = "0.0.4"
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fmodel-profiles#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "typing-extensions" },
]
@@ -2296,7 +2296,7 @@ dependencies = [
[[package]]
name = "langchain-nomic"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fnomic#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fnomic#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "nomic" },
@@ -2306,7 +2306,7 @@ dependencies = [
[[package]]
name = "langchain-ollama"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Follama#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Follama#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "ollama" },
@@ -2315,7 +2315,7 @@ dependencies = [
[[package]]
name = "langchain-openai"
version = "1.0.2"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fopenai#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fopenai#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "openai" },
@@ -2325,7 +2325,7 @@ dependencies = [
[[package]]
name = "langchain-perplexity"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fperplexity#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fperplexity#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "openai" },
@@ -2334,7 +2334,7 @@ dependencies = [
[[package]]
name = "langchain-prompty"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fprompty#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fprompty#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "pyyaml" },
@@ -2343,7 +2343,7 @@ dependencies = [
[[package]]
name = "langchain-qdrant"
version = "1.1.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fqdrant#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fqdrant#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
{ name = "pydantic" },
@@ -2499,7 +2499,7 @@ dependencies = [
[[package]]
name = "langchain-tests"
version = "1.0.1"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fstandard-tests#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fstandard-tests#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "httpx" },
{ name = "langchain-core" },
@@ -2517,7 +2517,7 @@ dependencies = [
[[package]]
name = "langchain-text-splitters"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Ftext-splitters#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Ftext-splitters#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "langchain-core" },
]
@@ -2536,7 +2536,7 @@ dependencies = [
[[package]]
name = "langchain-xai"
version = "1.0.0"
-source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fxai#1996d81d72f70c0df87b8965df9b7050e00115f4" }
+source = { git = "https://github.com/langchain-ai/langchain.git?subdirectory=libs%2Fpartners%2Fxai#3dfea96ec1d2dac4e506d287860ee943c183c9f1" }
dependencies = [
{ name = "aiohttp" },
{ name = "langchain-core" },
@@ -2546,8 +2546,8 @@ dependencies = [
[[package]]
name = "langgraph"
-version = "1.0.2"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Flanggraph#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+version = "1.0.3"
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Flanggraph#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph-checkpoint" },
@@ -2560,7 +2560,7 @@ dependencies = [
[[package]]
name = "langgraph-checkpoint"
version = "3.0.1"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "langchain-core" },
{ name = "ormsgpack" },
@@ -2569,7 +2569,7 @@ dependencies = [
[[package]]
name = "langgraph-checkpoint-aws"
version = "1.0.1"
-source = { git = "https://github.com/langchain-ai/langchain-aws.git?subdirectory=libs%2Flanggraph-checkpoint-aws#ce8208a12a0adb64c9cbf1ea8c179ad461db225a" }
+source = { git = "https://github.com/langchain-ai/langchain-aws.git?subdirectory=libs%2Flanggraph-checkpoint-aws#357bba09769aeb3597c18bd48ee5c40c7322d999" }
dependencies = [
{ name = "boto3" },
{ name = "langgraph" },
@@ -2579,7 +2579,7 @@ dependencies = [
[[package]]
name = "langgraph-checkpoint-postgres"
version = "3.0.1"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint-postgres#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint-postgres#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "langgraph-checkpoint" },
{ name = "orjson" },
@@ -2590,7 +2590,7 @@ dependencies = [
[[package]]
name = "langgraph-checkpoint-sqlite"
version = "3.0.0"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint-sqlite#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fcheckpoint-sqlite#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "aiosqlite" },
{ name = "langgraph-checkpoint" },
@@ -2600,7 +2600,7 @@ dependencies = [
[[package]]
name = "langgraph-prebuilt"
version = "1.0.2"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fprebuilt#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fprebuilt#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph-checkpoint" },
@@ -2609,7 +2609,7 @@ dependencies = [
[[package]]
name = "langgraph-sdk"
version = "0.2.9"
-source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fsdk-py#cf3e8252f5516b312e4738125c503977b0be3a3f" }
+source = { git = "https://github.com/langchain-ai/langgraph?subdirectory=libs%2Fsdk-py#0d4ac836e3a943a6a807910001f84a1d11b52776" }
dependencies = [
{ name = "httpx" },
{ name = "orjson" },
diff --git a/src/oss/langchain/models.mdx b/src/oss/langchain/models.mdx
index 64aff8f68f..4d0a7405ac 100644
--- a/src/oss/langchain/models.mdx
+++ b/src/oss/langchain/models.mdx
@@ -1048,7 +1048,7 @@ Models can be requested to provide their response in a format matching a given s
- **Method parameter**: Some providers support different methods (`'json_schema'`, `'function_calling'`, `'json_mode'`)
- `'json_schema'` typically refers to dedicated structured output features offered by a provider
- `'function_calling'` derives structured output by forcing a [tool call](#tool-calling) following the given schema
- - `'json_mode'` is a precursor to `'json_schema'` offered by some providers- it generates valid json, but the schema must be described in the prompt
+ - `'json_mode'` is a precursor to `'json_schema'` offered by some providers - it generates valid JSON, but the schema must be described in the prompt
- **Include raw**: Use `include_raw=True` to get both the parsed output and the raw AI message
- **Validation**: Pydantic models provide automatic validation, while `TypedDict` and JSON Schema require manual validation
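+
+For illustration, a minimal sketch combining these options (the `Movie` schema and the model string are placeholders):
+
+```python
+from pydantic import BaseModel
+
+from langchain.chat_models import init_chat_model
+
+
+class Movie(BaseModel):
+    title: str
+    year: int
+
+
+model = init_chat_model("openai:gpt-4o-mini")
+
+# `method` selects the provider mechanism; `include_raw=True` returns both the
+# parsed object and the underlying AI message.
+structured_model = model.with_structured_output(
+    Movie, method="json_schema", include_raw=True
+)
+
+result = structured_model.invoke("Name a classic sci-fi film and its release year.")
+result["parsed"]  # Movie instance, validated by Pydantic
+result["raw"]     # the raw AIMessage
+```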
diff --git a/src/oss/langchain/structured-output.mdx b/src/oss/langchain/structured-output.mdx
index b471f74bc1..d58f41c95c 100644
--- a/src/oss/langchain/structured-output.mdx
+++ b/src/oss/langchain/structured-output.mdx
@@ -72,7 +72,7 @@ const agent = createAgent({
## Provider strategy
-Some model providers support structured output natively through their APIs (currently only OpenAI and Grok). This is the most reliable method when available.
+Some model providers support structured output natively through their APIs (e.g. OpenAI, Grok, Gemini). This is the most reliable method when available.
To use this strategy, configure a `ProviderStrategy`:
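+
+For example, a minimal Python sketch (assuming the `langchain.agents.structured_output` import path; the `ContactInfo` schema and model name are placeholders):
+
+```python
+from pydantic import BaseModel
+
+from langchain.agents import create_agent
+from langchain.agents.structured_output import ProviderStrategy
+
+
+class ContactInfo(BaseModel):
+    name: str
+    email: str
+
+
+agent = create_agent(
+    model="openai:gpt-4o",
+    tools=[],
+    # ProviderStrategy uses the provider's native structured-output API
+    # to constrain the response to the schema.
+    response_format=ProviderStrategy(ContactInfo),
+)
+
+result = agent.invoke(
+    {"messages": [{"role": "user", "content": "Extract: Jane Doe, jane@example.com"}]}
+)
+result["structured_response"]  # ContactInfo(name='Jane Doe', email='jane@example.com')
+```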
diff --git a/src/oss/langgraph/add-memory.mdx b/src/oss/langgraph/add-memory.mdx
index 6eb2f0817b..46ca3234bf 100644
--- a/src/oss/langgraph/add-memory.mdx
+++ b/src/oss/langgraph/add-memory.mdx
@@ -1396,8 +1396,8 @@ const deleteMessages = (state) => {
When deleting messages, **make sure** that the resulting message history is valid. Check the limitations of the LLM provider you're using. For example:
-* some providers expect message history to start with a `user` message
-* most providers require `assistant` messages with tool calls to be followed by corresponding `tool` result messages.
+* Some providers expect message history to start with a `user` message.
+* Most providers require `assistant` messages with tool calls to be followed by corresponding `tool` result messages.
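+
+A minimal Python sketch of a deletion step that respects both constraints (assuming the standard `add_messages` reducer, which treats `RemoveMessage` as a deletion; the cutoff logic is illustrative):
+
+```python
+from langchain_core.messages import HumanMessage, RemoveMessage
+
+
+def delete_messages(state):
+    messages = state["messages"]
+    # Restart the kept history at the earliest human message after the first two turns.
+    keep_from = next(
+        (i for i, m in enumerate(messages) if i >= 2 and isinstance(m, HumanMessage)),
+        None,
+    )
+    if keep_from is None:
+        return {}
+    # Dropping everything before a human turn also removes any assistant tool-call
+    # message together with its tool results, so the pairing stays intact.
+    return {"messages": [RemoveMessage(id=m.id) for m in messages[:keep_from]]}
+```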
diff --git a/src/oss/python/integrations/chat/google_generative_ai.mdx b/src/oss/python/integrations/chat/google_generative_ai.mdx
index 3338786d5c..7bd797c9e7 100644
--- a/src/oss/python/integrations/chat/google_generative_ai.mdx
+++ b/src/oss/python/integrations/chat/google_generative_ai.mdx
@@ -216,7 +216,7 @@ print(f"Response for video: {response.content}")
### Image generation
-Certain models (such as `gemini-2.5-flash-image`) can generate text and images inline. You need to specify the desired `response_modalities`.
+Certain models (such as `gemini-2.5-flash-image`) can generate text and images inline.
See more information on the [Gemini API docs](https://ai.google.dev/gemini-api/docs/image-generation) for details.
@@ -254,6 +254,28 @@ image_base64 = _get_image_base64(response)
display(Image(data=base64.b64decode(image_base64), width=300))
```
+### Audio generation
+
+Certain models (such as `gemini-2.5-flash-preview-tts`) can generate audio files.
+
+See more information on the [Gemini API docs](https://ai.google.dev/gemini-api/docs/speech-generation) for details.
+
+```python
+from langchain_google_genai import ChatGoogleGenerativeAI
+
+llm = ChatGoogleGenerativeAI(model="gemini-2.5-flash-preview-tts")
+
+response = llm.invoke(
+    "Please say The quick brown fox jumps over the lazy dog",
+    generation_config=dict(response_modalities=["AUDIO"]),
+)
+
+# Base64 encoded binary data of the audio
+wav_data = response.additional_kwargs.get("audio")
+with open("output.wav", "wb") as f:
+    f.write(wav_data)
+```
+
## Tool calling
You can equip the model with tools to call.
@@ -301,39 +323,37 @@ AIMessage(content='The weather in Boston is sunny.', additional_kwargs={}, respo
## Structured output
-Force the model to respond with a specific structure using Pydantic models.
+Force the model to respond with a specific structure. See the [Gemini API docs](https://ai.google.dev/gemini-api/docs/structured-output) for more info.
```python
-from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_google_genai import ChatGoogleGenerativeAI
+from pydantic import BaseModel
+from typing import Literal
-# Define the desired structure
-class Person(BaseModel):
- """Information about a person."""
-
- name: str = Field(..., description="The person's name")
- height_m: float = Field(..., description="The person's height in meters")
-
-
-# Initialize the model
-llm = ChatGoogleGenerativeAI(model="gemini-2.5-flash-lite", temperature=0)
-
-# Method 1: Default function calling approach
-structured_llm_default = llm.with_structured_output(Person)
+class Feedback(BaseModel):
+    sentiment: Literal["positive", "neutral", "negative"]
+    summary: str
-# Method 2: Native JSON schema for better reliability (recommended)
-structured_llm_json = llm.with_structured_output(Person, method="json_schema")
-# Invoke the model with a query asking for structured information
-result = structured_llm_json.invoke(
- "Who was the 16th president of the USA, and how tall was he in meters?"
+llm = ChatGoogleGenerativeAI(model="gemini-2.5-pro")
+structured_llm = llm.with_structured_output(
+    schema=Feedback.model_json_schema(), method="json_schema"
)
-print(result)
+
+response = structured_llm.invoke("The new UI is great!")
+response["sentiment"] # "positive"
+response["summary"] # "The user expresses positive..."
```
-```output
-name='Abraham Lincoln' height_m=1.93
+For streaming structured output, merge dictionaries instead of using `+=`:
+
+```python
+stream = structured_llm.stream("The interface is intuitive and beautiful!")
+full = next(stream)
+for chunk in stream:
+    full.update(chunk)  # Merge dictionaries
+print(full) # Complete structured response
```
### Structured output methods
@@ -415,6 +435,25 @@ Code execution result: 4
2*2 is 4.
```
+## Thinking support
+
+See the [Gemini API docs](https://ai.google.dev/gemini-api/docs/thinking) for more info.
+
+```python
+from langchain_google_genai import ChatGoogleGenerativeAI
+
+llm = ChatGoogleGenerativeAI(
+    model="models/gemini-2.5-flash",
+    thinking_budget=1024
+)
+
+response = llm.invoke("How many O's are in Google? How did you verify your answer?")
+reasoning_tokens = response.usage_metadata["output_token_details"]["reasoning"]
+
+print("Response:", response.content)
+print("Reasoning tokens used:", reasoning_tokens)
+```
+
## Safety settings
Gemini models have default safety settings that can be overridden. If you are receiving lots of "Safety Warnings" from your models, you can try tweaking the `safety_settings` attribute of the model. For example, to turn off safety blocking for dangerous content, you can construct your LLM as follows:
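+
+A sketch of that construction (the model name is a placeholder; `HarmCategory` and `HarmBlockThreshold` are re-exported by `langchain_google_genai`):
+
+```python
+from langchain_google_genai import (
+    ChatGoogleGenerativeAI,
+    HarmBlockThreshold,
+    HarmCategory,
+)
+
+llm = ChatGoogleGenerativeAI(
+    model="gemini-2.5-flash",
+    safety_settings={
+        # Disable blocking for the "dangerous content" category.
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,
+    },
+)
+```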