
Commit 628ef63

Update documentation to reflect broader model support for parallel tool calling
- Update README to include all GenericChatRequest models (Grok, OpenAI, Mistral)
- Update code comments and docstrings
- Update error messages with complete model list
- Clarify that feature works with GenericChatRequest, not just Meta/Llama
1 parent dcd788b commit 628ef63

3 files changed: +11 −8 lines


libs/oci/README.md

Lines changed: 4 additions & 4 deletions
@@ -79,29 +79,29 @@ structured_llm = llm.with_structured_output(Joke)
 structured_llm.invoke("Tell me a joke about programming")
 ```
 
-### 5. Use Parallel Tool Calling (Meta/Llama models only)
+### 5. Use Parallel Tool Calling
 
 Enable parallel tool calling to execute multiple tools simultaneously, improving performance for multi-tool workflows.
 
 ```python
 from langchain_oci import ChatOCIGenAI
 
 # Option 1: Set at class level for all tool bindings
 llm = ChatOCIGenAI(
-    model_id="meta.llama-3.3-70b-instruct",
+    model_id="meta.llama-3.3-70b-instruct",  # Works with Meta, Llama, Grok, OpenAI, Mistral
     service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
     compartment_id="MY_COMPARTMENT_ID",
     parallel_tool_calls=True  # Enable parallel tool calling
 )
 
 # Option 2: Set per-binding
-llm = ChatOCIGenAI(model_id="meta.llama-3.3-70b-instruct")
+llm = ChatOCIGenAI(model_id="xai.grok-4-fast")  # Example with Grok
 llm_with_tools = llm.bind_tools(
     [get_weather, calculate_tip, get_population],
     parallel_tool_calls=True  # Tools can execute simultaneously
 )
 ```
 
-<sub>**Note:** Parallel tool calling is only supported for Meta/Llama models. Cohere models will raise an error if this parameter is used.</sub>
+<sub>**Note:** Parallel tool calling is supported for all models using GenericChatRequest (Meta, Llama, xAI Grok, OpenAI, Mistral). Cohere models will raise an error if this parameter is used.</sub>
 
 
 ## OCI Data Science Model Deployment Examples
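For context, here is a runnable sketch of the per-binding pattern shown in the README hunk above. The `get_weather`, `calculate_tip`, and `get_population` tools are referenced but not defined in the README excerpt, so the stubs below are illustrative only, and the endpoint and compartment values are placeholders for your own OCI configuration.

```python
# Sketch of the per-binding parallel tool calling pattern from the README hunk.
# Tool bodies are illustrative stubs; credentials/endpoints are placeholders.
from langchain_core.tools import tool
from langchain_oci import ChatOCIGenAI


@tool
def get_weather(city: str) -> str:
    """Return a short weather summary for a city (stub)."""
    return f"Sunny in {city}"


@tool
def calculate_tip(bill: float, percent: float = 20.0) -> float:
    """Calculate a tip for a restaurant bill (stub)."""
    return round(bill * percent / 100, 2)


@tool
def get_population(country: str) -> str:
    """Return a population figure for a country (stub)."""
    return f"No population data for {country} in this stub."


llm = ChatOCIGenAI(
    model_id="xai.grok-4-fast",  # any GenericChatRequest model per the note above
    service_endpoint="https://inference.generativeai.us-chicago-1.oci.oraclecloud.com",
    compartment_id="MY_COMPARTMENT_ID",
)
llm_with_tools = llm.bind_tools(
    [get_weather, calculate_tip, get_population],
    parallel_tool_calls=True,  # allow the model to request several tools in one turn
)

response = llm_with_tools.invoke(
    "What's the weather in Chicago, a 20% tip on $84, and the population of Japan?"
)
# With parallel tool calling enabled, the response may carry multiple tool calls.
for call in response.tool_calls:
    print(call["name"], call["args"])
```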

libs/oci/langchain_oci/chat_models/oci_generative_ai.py

Lines changed: 5 additions & 3 deletions
@@ -361,7 +361,8 @@ def messages_to_oci_params(
         if kwargs.get("is_parallel_tool_calls"):
             raise ValueError(
                 "Parallel tool calls are not supported for Cohere models. "
-                "This feature is only available for Meta/Llama models using GenericChatRequest."
+                "This feature is only available for models using GenericChatRequest "
+                "(Meta, Llama, xAI Grok, OpenAI, Mistral)."
             )
 
         is_force_single_step = kwargs.get("is_force_single_step", False)
@@ -852,7 +853,7 @@ def _should_allow_more_tool_calls(
             result["tool_choice"] = self.oci_tool_choice_none()
         # else: Allow model to decide (default behavior)
 
-        # Add parallel tool calls support for Meta/Llama models
+        # Add parallel tool calls support (GenericChatRequest models)
         if "is_parallel_tool_calls" in kwargs:
             result["is_parallel_tool_calls"] = kwargs["is_parallel_tool_calls"]
 
@@ -1241,7 +1242,8 @@ def bind_tools(
                 If True, the model can call multiple tools simultaneously.
                 If False, tools are called sequentially.
                 If None (default), uses the class-level parallel_tool_calls setting.
-                Only supported for Meta/Llama models using GenericChatRequest.
+                Supported for models using GenericChatRequest (Meta, Llama, xAI Grok,
+                OpenAI, Mistral). Not supported for Cohere models.
             kwargs: Any additional parameters are passed directly to
                 :meth:`~langchain_oci.chat_models.oci_generative_ai.ChatOCIGenAI.bind`.
         """

libs/oci/langchain_oci/llms/oci_generative_ai.py

Lines changed: 2 additions & 1 deletion
@@ -123,7 +123,8 @@ class OCIGenAIBase(BaseModel, ABC):
     parallel_tool_calls: bool = False
     """Whether to enable parallel function calling during tool use.
     If True, the model can call multiple tools simultaneously.
-    Only supported for Meta/Llama models using GenericChatRequest.
+    Supported for all models using GenericChatRequest (Meta, Llama, xAI Grok, OpenAI, Mistral).
+    Not supported for Cohere models.
     Default: False for backward compatibility."""
 
     model_config = ConfigDict(
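A small sketch of the class-level default documented above: parallel_tool_calls stays False unless explicitly enabled, and, per the bind_tools docstring in the previous file, an explicit per-binding value takes precedence over this class-level setting. The model ID is a placeholder.

```python
# Minimal sketch of the class-level default described in the docstring above.
# Assumes only the documented behavior; the model ID is a placeholder.
from langchain_oci import ChatOCIGenAI

llm = ChatOCIGenAI(model_id="meta.llama-3.3-70b-instruct")
print(llm.parallel_tool_calls)  # False — backward-compatible default

llm_parallel = ChatOCIGenAI(
    model_id="meta.llama-3.3-70b-instruct",
    parallel_tool_calls=True,  # class-level setting used by subsequent bind_tools() calls
)
print(llm_parallel.parallel_tool_calls)  # True

# Per the bind_tools docstring, passing parallel_tool_calls explicitly to
# bind_tools() overrides this class-level setting for that binding.
```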
