Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions examples/bedrock/fast-agent.config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Example minimal fast-agent.config.yaml for Bedrock

# List of supported models: https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html
default_model: bedrock.amazon.nova-lite-v1:0

# Bedrock uses the AWS credentials provider chain to authenticate.
# This can be accomplished with aws sso login on local machines,
# or by IAM roles within AWS.
# see https://docs.aws.amazon.com/res/latest/ug/sso-idc.html
bedrock:
region: "us-east-1" # required
profile: "default" # optional, defaults to "default" - only needed if you have multiple profiles.
# Only needed on local machines, not on AWS.
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ dependencies = [
"anthropic>=0.55.0",
"openai>=1.93.0",
"azure-identity>=1.14.0",
"boto3>=1.35.0",
"prompt-toolkit>=3.0.50",
"aiohttp>=3.11.13",
"opentelemetry-instrumentation-openai>=0.40.14; python_version >= '3.10' and python_version < '4.0'",
Expand Down
17 changes: 17 additions & 0 deletions src/mcp_agent/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,6 +253,20 @@ class TensorZeroSettings(BaseModel):
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)


class BedrockSettings(BaseModel):
    """
    Settings for using AWS Bedrock models in the fast-agent application.

    Authentication is not configured here: Bedrock relies on the standard
    AWS credentials provider chain (env vars, `aws sso login`, IAM roles),
    so only region/profile selection is exposed.
    """

    region: str | None = None
    """AWS region for Bedrock service"""

    profile: str | None = None
    """AWS profile to use for authentication"""

    # extra="allow" lets users place additional, forward-compatible keys under
    # the `bedrock:` section of the config file without a validation error.
    model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)


class HuggingFaceSettings(BaseModel):
"""
Settings for HuggingFace authentication (used for MCP connections).
Expand Down Expand Up @@ -419,6 +433,9 @@ class Settings(BaseSettings):
aliyun: OpenAISettings | None = None
"""Settings for using Aliyun OpenAI Service in the fast-agent application"""

bedrock: BedrockSettings | None = None
"""Settings for using AWS Bedrock models in the fast-agent application"""

huggingface: HuggingFaceSettings | None = None
"""Settings for HuggingFace authentication (used for MCP connections)"""

Expand Down
9 changes: 9 additions & 0 deletions src/mcp_agent/llm/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from mcp_agent.llm.providers.augmented_llm_aliyun import AliyunAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_anthropic import AnthropicAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_azure import AzureOpenAIAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_bedrock import BedrockAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_deepseek import DeepSeekAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_generic import GenericAugmentedLLM
from mcp_agent.llm.providers.augmented_llm_google_native import GoogleNativeAugmentedLLM
Expand Down Expand Up @@ -41,6 +42,7 @@
Type[GoogleNativeAugmentedLLM],
Type[GenericAugmentedLLM],
Type[AzureOpenAIAugmentedLLM],
Type[BedrockAugmentedLLM],
]


Expand Down Expand Up @@ -120,6 +122,7 @@ class ModelFactory:
"qwen-plus": Provider.ALIYUN,
"qwen-max": Provider.ALIYUN,
"qwen-long": Provider.ALIYUN,

}

MODEL_ALIASES = {
Expand Down Expand Up @@ -155,6 +158,7 @@ class ModelFactory:
Provider.TENSORZERO: TensorZeroAugmentedLLM,
Provider.AZURE: AzureOpenAIAugmentedLLM,
Provider.ALIYUN: AliyunAugmentedLLM,
Provider.BEDROCK: BedrockAugmentedLLM,
}

# Mapping of special model names to their specific LLM classes
Expand Down Expand Up @@ -209,6 +213,11 @@ def parse_model_string(cls, model_string: str) -> ModelConfig:
# If provider still None, try to get from DEFAULT_PROVIDERS using the model_name_str
if provider is None:
provider = cls.DEFAULT_PROVIDERS.get(model_name_str)

# If still None, try pattern matching for Bedrock models
if provider is None and BedrockAugmentedLLM.matches_model_pattern(model_name_str):
provider = Provider.BEDROCK

if provider is None:
raise ModelConfigError(
f"Unknown model or provider for: {model_string}. Model name parsed as '{model_name_str}'"
Expand Down
1 change: 1 addition & 0 deletions src/mcp_agent/llm/provider_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,3 +27,4 @@ def __new__(cls, config_name, display_name=None):
ALIYUN = ("aliyun", "Aliyun") # Aliyun Bailian OpenAI Service
HUGGINGFACE = ("huggingface", "HuggingFace") # For HuggingFace MCP connections
XAI = ("xai", "XAI") # For xAI Grok models
BEDROCK = ("bedrock", "Bedrock")
Loading
Loading