class BedrockChatModel(AnthropicChatModel):
    """Anthropic chat model served through AWS Bedrock.

    Credentials and region are read from the environment: ``AWS_REGION``,
    ``AWS_ACCESS_KEY`` and ``AWS_SECRET_KEY`` are required, and
    ``AWS_SESSION_TOKEN`` is picked up when present so temporary (STS)
    credentials also work.

    Args:
        model_name: Bedrock model identifier
            (e.g. ``us.anthropic.claude-3-7-sonnet-20250219-v1:0``).
        api_key: Unused; kept only for signature compatibility with
            ``AnthropicChatModel``. Bedrock auth comes from AWS env vars.
        temperature: Sampling temperature.
        max_tokens: Maximum number of tokens to generate.
        max_retry: Maximum number of retries on API failure.

    Raises:
        ValueError: If any of the required AWS environment variables is unset.
    """

    def __init__(
        self,
        model_name,
        api_key=None,
        temperature=0.5,
        max_tokens=100,
        max_retry=4,
    ):
        # NOTE(review): AnthropicChatModel.__init__ is deliberately bypassed and
        # its attributes re-set by hand — confirm the parent's call path does
        # not depend on anything else initialized there (e.g. cost tracking).
        self.model_name = model_name
        self.temperature = temperature
        self.max_tokens = max_tokens
        self.max_retry = max_retry

        # Fail fast with a clear message instead of an opaque SDK auth error
        # at the first completion call.
        if (
            not os.getenv("AWS_REGION")
            or not os.getenv("AWS_ACCESS_KEY")
            or not os.getenv("AWS_SECRET_KEY")
        ):
            raise ValueError(
                "AWS_REGION, AWS_ACCESS_KEY and AWS_SECRET_KEY must be set in the environment when using BedrockChatModel"
            )

        self.client = anthropic.AnthropicBedrock(
            aws_region=os.getenv("AWS_REGION"),
            aws_access_key=os.getenv("AWS_ACCESS_KEY"),
            aws_secret_key=os.getenv("AWS_SECRET_KEY"),
            # Optional: None when unset, which matches the SDK default, so
            # existing setups without a session token are unaffected.
            aws_session_token=os.getenv("AWS_SESSION_TOKEN"),
        )


@dataclass
class BedrockModelArgs(BaseModelArgs):
    """Serializable arguments that build a :class:`BedrockChatModel`.

    NOTE(review): ``max_retry`` is left at the BedrockChatModel default;
    forward it here if ``BaseModelArgs`` grows such a field.
    """

    def make_model(self):
        """Instantiate the Bedrock-backed chat model from these args."""
        return BedrockChatModel(
            model_name=self.model_name,
            temperature=self.temperature,
            max_tokens=self.max_new_tokens,
        )
"bedrock/claude-4-5-sonnet": BedrockModelArgs( + model_name="us.anthropic.claude-sonnet-4-5-20250929-v1:0", + max_new_tokens=16_384, + temperature=1e-1, + ), # ---------------- OSS LLMs ----------------# "meta-llama/Meta-Llama-3-70B-Instruct": SelfHostedModelArgs( model_name="meta-llama/Meta-Llama-3-70B-Instruct",