feat: update aws_bedrock #1194

Open · wants to merge 12 commits into master
4 changes: 4 additions & 0 deletions .env
@@ -90,3 +90,7 @@

# OpenBB Platform API (https://my.openbb.co/app/credentials)
# OPENBB_TOKEN="Fill your API key here"

# AWS API (https://aws.amazon.com/)
# AWS_ACCESS_KEY_ID="Fill your Access Key ID here"
# AWS_SECRET_ACCESS_KEY="Fill your Secret Access Key here"
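
For local testing, a minimal sketch of making these values visible to a Python process, assuming python-dotenv is installed (the loading mechanism is an assumption; the backend itself only reads plain environment variables):

import os

from dotenv import load_dotenv  # assumption: python-dotenv is available

# Load the key/value pairs from .env into the process environment.
load_dotenv()

# The credentials added above are now ordinary environment variables.
print("AWS_ACCESS_KEY_ID set:", bool(os.environ.get("AWS_ACCESS_KEY_ID")))
print("AWS_SECRET_ACCESS_KEY set:", bool(os.environ.get("AWS_SECRET_ACCESS_KEY")))
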
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
@@ -13,6 +13,7 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from .anthropic_config import ANTHROPIC_API_PARAMS, AnthropicConfig
from .base_config import BaseConfig
from .bedrock_config import BEDROCK_API_PARAMS, BedrockConfig
from .cohere_config import COHERE_API_PARAMS, CohereConfig
from .deepseek_config import DEEPSEEK_API_PARAMS, DeepSeekConfig
from .gemini_config import Gemini_API_PARAMS, GeminiConfig
@@ -75,6 +76,8 @@
    'YI_API_PARAMS',
    'QwenConfig',
    'QWEN_API_PARAMS',
    'BedrockConfig',
    'BEDROCK_API_PARAMS',
    'DeepSeekConfig',
    'DEEPSEEK_API_PARAMS',
    'InternLMConfig',
36 changes: 36 additions & 0 deletions camel/configs/bedrock_config.py
@@ -0,0 +1,36 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from typing import Optional, Union

from camel.configs.base_config import BaseConfig


class BedrockConfig(BaseConfig):
    r"""Defines the parameters for generating chat completions using the
    AWS Bedrock OpenAI-compatible API.

    Args:
        max_tokens (int, optional): The maximum number of tokens to generate.
            (default: :obj:`400`)
        temperature (float, optional): Controls the randomness of the output.
            (default: :obj:`0.7`)
        top_p (float, optional): Nucleus sampling parameter.
            (default: :obj:`0.7`)
        tool_choice (Union[dict[str, str], str], optional): The tool choice.
            (default: :obj:`None`)
    """

    max_tokens: Optional[int] = 400
Review comment (Member): 400 tokens is quite limited; could we set this to None?
    temperature: Optional[float] = 0.7
    top_p: Optional[float] = 0.7
    tool_choice: Optional[Union[dict[str, str], str]] = None


BEDROCK_API_PARAMS = {param for param in BedrockConfig.model_fields.keys()}
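
A quick sketch of how this config is meant to be consumed, assuming BaseConfig.as_dict() serializes the model fields to a plain dict (which is how the backend below uses BedrockConfig().as_dict()):

from camel.configs import BEDROCK_API_PARAMS, BedrockConfig

# Serialize a config; the resulting keys become kwargs for the chat call.
config_dict = BedrockConfig(temperature=0.5, max_tokens=1024).as_dict()
print(config_dict)

# By construction, every key produced here is a recognized Bedrock parameter.
print(set(config_dict) - BEDROCK_API_PARAMS)  # expected: empty set
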
2 changes: 2 additions & 0 deletions camel/models/__init__.py
@@ -12,6 +12,7 @@
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from .anthropic_model import AnthropicModel
from .aws_bedrock_model import AWSBedrockModel
from .azure_openai_model import AzureOpenAIModel
from .base_model import BaseModelBackend
from .cohere_model import CohereModel
@@ -66,6 +67,7 @@
    'TogetherAIModel',
    'YiModel',
    'QwenModel',
    'AWSBedrockModel',
    'ModelProcessingError',
    'DeepSeekModel',
    'FishAudioModel',
123 changes: 123 additions & 0 deletions camel/models/aws_bedrock_model.py
@@ -0,0 +1,123 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
import os
from typing import Any, Dict, List, Optional, Union

from openai import OpenAI

from camel.configs import BEDROCK_API_PARAMS, BedrockConfig
from camel.messages import OpenAIMessage
from camel.models.base_model import BaseModelBackend
from camel.types import ChatCompletion, ModelType
from camel.utils import (
BaseTokenCounter,
OpenAITokenCounter,
api_keys_required,
)


class AWSBedrockModel(BaseModelBackend):
    r"""AWS Bedrock API in a unified BaseModelBackend interface.

    Args:
        model_type (Union[ModelType, str]): Model for which a backend is
            created.
        model_config_dict (Dict[str, Any], optional): A dictionary that will
            be fed into :obj:`openai.ChatCompletion.create()`. If
            :obj:`None`, :obj:`BedrockConfig().as_dict()` will be used.
            (default: :obj:`None`)
        api_key (str, optional): The API key for authenticating with the
            AWS Bedrock service. (default: :obj:`None`)
        url (str, optional): The URL to the AWS Bedrock service.
            (default: :obj:`None`)
        token_counter (BaseTokenCounter, optional): Token counter to use for
            the model. If not provided, :obj:`OpenAITokenCounter(
            ModelType.GPT_4O_MINI)` will be used.
            (default: :obj:`None`)

    References:
        https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html
    """

    @api_keys_required(
        [
            ("url", "BEDROCK_API_BASE_URL"),
        ]
    )
    def __init__(
        self,
        model_type: Union[ModelType, str],
        model_config_dict: Optional[Dict[str, Any]] = None,
        api_key: Optional[str] = None,
        url: Optional[str] = None,
        token_counter: Optional[BaseTokenCounter] = None,
    ) -> None:
        if model_config_dict is None:
            model_config_dict = BedrockConfig().as_dict()
        api_key = api_key or os.environ.get("BEDROCK_API_KEY")
        url = url or os.environ.get("BEDROCK_API_BASE_URL")
        super().__init__(
            model_type, model_config_dict, api_key, url, token_counter
        )
        self._client = OpenAI(
            timeout=180,
            max_retries=3,
            api_key=self._api_key,
            base_url=self._url,
        )

    def run(self, messages: List[OpenAIMessage]) -> ChatCompletion:
        r"""Runs the query to the backend model.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format.

        Returns:
            ChatCompletion: The response object in OpenAI's format.
        """
        response = self._client.chat.completions.create(
            messages=messages,
            model=self.model_type,
            **self.model_config_dict,
        )
        return response

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        if not self._token_counter:
            self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
        return self._token_counter

    def check_model_config(self):
        r"""Check whether the input model configuration contains unexpected
        arguments.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected argument for this model class.
        """
        for param in self.model_config_dict:
            if param not in BEDROCK_API_PARAMS:
                raise ValueError(
                    f"Invalid parameter '{param}' in model_config_dict. "
                    f"Valid parameters are: {BEDROCK_API_PARAMS}"
                )
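
To illustrate what check_model_config guards against, here is a small sketch that mirrors the same membership test on a hypothetical user config (the presence_penalty key is only an illustrative invalid example, not part of this PR):

from camel.configs import BEDROCK_API_PARAMS

# Hypothetical user-supplied config containing one unsupported key.
candidate_config = {"max_tokens": 512, "presence_penalty": 0.4}

# Same membership test as AWSBedrockModel.check_model_config().
invalid = [key for key in candidate_config if key not in BEDROCK_API_PARAMS]
if invalid:
    print(f"These keys would raise ValueError: {invalid}")
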
3 changes: 3 additions & 0 deletions camel/models/model_factory.py
@@ -14,6 +14,7 @@
from typing import Dict, Optional, Type, Union

from camel.models.anthropic_model import AnthropicModel
from camel.models.aws_bedrock_model import AWSBedrockModel
from camel.models.azure_openai_model import AzureOpenAIModel
from camel.models.base_model import BaseModelBackend
from camel.models.cohere_model import CohereModel
@@ -98,6 +99,8 @@ def create(
            model_class = TogetherAIModel
        elif model_platform.is_litellm:
            model_class = LiteLLMModel
        elif model_platform.is_aws_bedrock:
            model_class = AWSBedrockModel
        elif model_platform.is_nvidia:
            model_class = NvidiaModel

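
A hedged sketch of exercising the new branch through the factory. It assumes BEDROCK_API_KEY and BEDROCK_API_BASE_URL are already set in the environment (the backend's constructor requires the latter), and reuses the model id from the example later in this PR:

from camel.configs import BedrockConfig
from camel.models import ModelFactory
from camel.types import ModelPlatformType

# model_platform.is_aws_bedrock selects AWSBedrockModel in ModelFactory.create.
model = ModelFactory.create(
    model_platform=ModelPlatformType.AWS_BEDROCK,
    model_type="meta.llama3-70b-instruct-v1:0",
    model_config_dict=BedrockConfig(max_tokens=1024).as_dict(),
)
print(type(model).__name__)  # AWSBedrockModel
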
6 changes: 6 additions & 0 deletions camel/types/enums.py
@@ -709,6 +709,7 @@ class ModelPlatformType(Enum):
    DEEPSEEK = "deepseek"
    SGLANG = "sglang"
    INTERNLM = "internlm"
    AWS_BEDROCK = "aws-bedrock"

    @property
    def is_openai(self) -> bool:
@@ -816,6 +817,11 @@ def is_internlm(self) -> bool:
        r"""Returns whether this platform is InternLM."""
        return self is ModelPlatformType.INTERNLM

    @property
    def is_aws_bedrock(self) -> bool:
        r"""Returns whether this platform is AWS Bedrock."""
        return self is ModelPlatformType.AWS_BEDROCK


class AudioModelType(Enum):
    TTS_1 = "tts-1"
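
A short sketch of the new enum member in use; the string value round-trips through the Enum constructor and the helper property added above:

from camel.types import ModelPlatformType

# The enum can be built from its string value, e.g. when read from a config.
platform = ModelPlatformType("aws-bedrock")
print(platform.is_aws_bedrock)  # True
print(platform.is_openai)       # False
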
2 changes: 1 addition & 1 deletion docs/key_modules/models.md
@@ -87,7 +87,7 @@ The following table lists currently supported model platforms by CAMEL.
| Together AI | https://docs.together.ai/docs/chat-models | ----- |
| LiteLLM | https://docs.litellm.ai/docs/providers | ----- |
| SGLang | https://sgl-project.github.io/references/supported_models.html | ----- |

| AWSBedrock | https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html | ----- |
## 3. Using Models by API calling

Here is example code for using a specific model (gpt-4o-mini). If you want to use another model, simply change these three parameters: `model_platform`, `model_type`, and `model_config_dict`.
42 changes: 42 additions & 0 deletions examples/models/aws_bedrock_model_example.py
@@ -0,0 +1,42 @@
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
from camel.agents import ChatAgent
from camel.models import ModelFactory
from camel.types import ModelPlatformType

model = ModelFactory.create(
    model_platform=ModelPlatformType.AWS_BEDROCK,
    model_type="meta.llama3-70b-instruct-v1:0",
Review comment (Collaborator): Add values in camel.types.enums.ModelType.
)

camel_agent = ChatAgent(model=model)

user_msg = """Say hi to CAMEL AI, one open-source community dedicated to the
study of autonomous and communicative agents."""

response = camel_agent.step(user_msg)
print(response.msgs[0].content)
'''
===============================================================================
Hi CAMEL AI community! It's great to see a dedicated group of individuals
passionate about the study of autonomous and communicative agents. Your
open-source community is a fantastic platform for collaboration, knowledge
sharing, and innovation in this exciting field. I'm happy to interact with you
and provide assistance on any topics related to autonomous agents, natural
language processing, or artificial intelligence in general. Feel free to ask
me any questions, share your projects, or discuss the latest advancements in
the field. Let's explore the possibilities of autonomous and communicative
agents together!
===============================================================================
'''