Skip to content

Commit

Permalink
feat: Add support for 01 model platform (#1093)
Browse files Browse the repository at this point in the history
Co-authored-by: Wendong-Fan <133094783+Wendong-Fan@users.noreply.github.com>
Co-authored-by: Wendong <w3ndong.fan@gmail.com>
  • Loading branch information
3 people authored Oct 30, 2024
1 parent d176883 commit aac3fa2
Show file tree
Hide file tree
Showing 21 changed files with 402 additions and 108 deletions.
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
)
from .togetherai_config import TOGETHERAI_API_PARAMS, TogetherAIConfig
from .vllm_config import VLLM_API_PARAMS, VLLMConfig
from .yi_config import YI_API_PARAMS, YiConfig
from .zhipuai_config import ZHIPUAI_API_PARAMS, ZhipuAIConfig

__all__ = [
Expand Down Expand Up @@ -58,4 +59,6 @@
'SAMBA_CLOUD_API_PARAMS',
'TogetherAIConfig',
'TOGETHERAI_API_PARAMS',
'YiConfig',
'YI_API_PARAMS',
]
58 changes: 58 additions & 0 deletions camel/configs/yi_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from __future__ import annotations

from typing import Optional, Union

from camel.configs.base_config import BaseConfig
from camel.types import NOT_GIVEN, NotGiven


class YiConfig(BaseConfig):
    r"""Defines the parameters for generating chat completions using the
    Yi API. You can refer to the following link for more details:
    https://platform.lingyiwanwu.com/docs/api-reference

    Args:
        tool_choice (Union[dict[str, str], str], optional): Controls which (if
            any) tool is called by the model. :obj:`"none"` means the model
            will not call any tool and instead generates a message.
            :obj:`"auto"` means the model can pick between generating a
            message or calling one or more tools. :obj:`"required"` or
            specifying a particular tool via
            {"type": "function", "function": {"name": "some_function"}}
            can be used to guide the model to use tools more strongly.
            (default: :obj:`None`)
        max_tokens (int, optional): Specifies the maximum number of tokens
            the model can generate. This sets an upper limit, but does not
            guarantee that this number will always be reached. If not set,
            no explicit limit is sent to the API.
            (default: :obj:`NOT_GIVEN`)
        top_p (float, optional): Controls the randomness of the generated
            results. Lower values lead to less randomness, while higher
            values increase randomness. (default: :obj:`0.9`)
        temperature (float, optional): Controls the diversity and focus of
            the generated results. Lower values make the output more focused,
            while higher values make it more diverse. (default: :obj:`0.3`)
        stream (bool, optional): If True, enables streaming output.
            (default: :obj:`False`)
    """

    tool_choice: Optional[Union[dict[str, str], str]] = None
    max_tokens: Union[int, NotGiven] = NOT_GIVEN
    top_p: float = 0.9
    temperature: float = 0.3
    stream: bool = False


YI_API_PARAMS = {param for param in YiConfig.model_fields.keys()}
2 changes: 2 additions & 0 deletions camel/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
from .stub_model import StubModel
from .togetherai_model import TogetherAIModel
from .vllm_model import VLLMModel
from .yi_model import YiModel
from .zhipuai_model import ZhipuAIModel

__all__ = [
Expand All @@ -51,4 +52,5 @@
'RekaModel',
'SambaModel',
'TogetherAIModel',
'YiModel',
]
3 changes: 3 additions & 0 deletions camel/models/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from camel.models.stub_model import StubModel
from camel.models.togetherai_model import TogetherAIModel
from camel.models.vllm_model import VLLMModel
from camel.models.yi_model import YiModel
from camel.models.zhipuai_model import ZhipuAIModel
from camel.types import ModelPlatformType, ModelType, UnifiedModelType
from camel.utils import BaseTokenCounter
Expand Down Expand Up @@ -108,6 +109,8 @@ def create(
model_class = RekaModel
elif model_type == ModelType.STUB:
model_class = StubModel
elif model_platform.is_yi and model_type.is_yi:
model_class = YiModel

if model_class is None:
raise ValueError(
Expand Down
138 changes: 138 additions & 0 deletions camel/models/yi_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

import os
from typing import Any, Dict, List, Optional, Union

from openai import OpenAI, Stream

from camel.configs import YI_API_PARAMS, YiConfig
from camel.messages import OpenAIMessage
from camel.models import BaseModelBackend
from camel.types import (
ChatCompletion,
ChatCompletionChunk,
ModelType,
)
from camel.utils import (
BaseTokenCounter,
OpenAITokenCounter,
api_keys_required,
)


class YiModel(BaseModelBackend):
    r"""Backend for the Yi (01-ai) chat-completion API, exposed through the
    unified :obj:`BaseModelBackend` interface.

    Args:
        model_type (Union[ModelType, str]): The Yi model to create a backend
            for, one of the Yi series.
        model_config_dict (Optional[Dict[str, Any]], optional): Keyword
            arguments forwarded to :obj:`openai.ChatCompletion.create()`.
            When :obj:`None`, :obj:`YiConfig().as_dict()` is used.
            (default: :obj:`None`)
        api_key (Optional[str], optional): API key for authenticating with
            the Yi service; falls back to the ``YI_API_KEY`` environment
            variable. (default: :obj:`None`)
        url (Optional[str], optional): Base URL of the Yi service; falls
            back to ``YI_API_BASE_URL``, then
            :obj:`https://api.lingyiwanwu.com/v1`. (default: :obj:`None`)
        token_counter (Optional[BaseTokenCounter], optional): Token counter
            used by this backend. When not provided, :obj:`OpenAITokenCounter(
            ModelType.GPT_4O_MINI)` is used. (default: :obj:`None`)
    """

    def __init__(
        self,
        model_type: Union[ModelType, str],
        model_config_dict: Optional[Dict[str, Any]] = None,
        api_key: Optional[str] = None,
        url: Optional[str] = None,
        token_counter: Optional[BaseTokenCounter] = None,
    ) -> None:
        # Resolve configuration and credentials, preferring explicit
        # arguments over environment variables / defaults.
        config = (
            YiConfig().as_dict()
            if model_config_dict is None
            else model_config_dict
        )
        resolved_key = api_key or os.environ.get("YI_API_KEY")
        resolved_url = url or os.environ.get(
            "YI_API_BASE_URL", "https://api.lingyiwanwu.com/v1"
        )
        super().__init__(
            model_type, config, resolved_key, resolved_url, token_counter
        )
        # Yi exposes an OpenAI-compatible endpoint, so the OpenAI client
        # is reused with a custom base URL.
        self._client = OpenAI(
            timeout=60,
            max_retries=3,
            api_key=self._api_key,
            base_url=self._url,
        )

    @api_keys_required("YI_API_KEY")
    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Run inference of Yi chat completion.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        return self._client.chat.completions.create(
            messages=messages,
            model=self.model_type,
            **self.model_config_dict,
        )

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Lazily create and return the token counter for this backend.

        Returns:
            OpenAITokenCounter: The token counter following the model's
                tokenization style.
        """
        if not self._token_counter:
            self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
        return self._token_counter

    def check_model_config(self):
        r"""Validate that the model configuration contains only arguments
        supported by the Yi API.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected arguments to Yi API.
        """
        # Find the first unsupported key (dict order preserved) so the
        # raised message matches the earliest offending argument.
        unexpected = next(
            (p for p in self.model_config_dict if p not in YI_API_PARAMS),
            None,
        )
        if unexpected is not None:
            raise ValueError(
                f"Unexpected argument `{unexpected}` is "
                "input into Yi model backend."
            )

    @property
    def stream(self) -> bool:
        r"""Whether the backend is in stream mode, which sends partial
        results each time.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return self.model_config_dict.get('stream', False)
45 changes: 45 additions & 0 deletions camel/types/enums.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,17 @@ class ModelType(UnifiedModelType, Enum):
REKA_FLASH = "reka-flash"
REKA_EDGE = "reka-edge"

# Yi models (01-ai)
YI_LIGHTNING = "yi-lightning"
YI_LARGE = "yi-large"
YI_MEDIUM = "yi-medium"
YI_LARGE_TURBO = "yi-large-turbo"
YI_VISION = "yi-vision"
YI_MEDIUM_200K = "yi-medium-200k"
YI_SPARK = "yi-spark"
YI_LARGE_RAG = "yi-large-rag"
YI_LARGE_FC = "yi-large-fc"

def __str__(self):
return self.value

Expand Down Expand Up @@ -220,6 +231,25 @@ def is_reka(self) -> bool:
ModelType.REKA_FLASH,
}

@property
def is_yi(self) -> bool:
r"""Returns whether this type of models is Yi model.
Returns:
bool: Whether this type of models is Yi.
"""
return self in {
ModelType.YI_LIGHTNING,
ModelType.YI_LARGE,
ModelType.YI_MEDIUM,
ModelType.YI_LARGE_TURBO,
ModelType.YI_VISION,
ModelType.YI_MEDIUM_200K,
ModelType.YI_SPARK,
ModelType.YI_LARGE_RAG,
ModelType.YI_LARGE_FC,
}

@property
def token_limit(self) -> int:
r"""Returns the maximum token limit for a given model.
Expand Down Expand Up @@ -249,13 +279,21 @@ def token_limit(self) -> int:
return 8_192
elif self in {
ModelType.GPT_3_5_TURBO,
ModelType.YI_LIGHTNING,
ModelType.YI_MEDIUM,
ModelType.YI_LARGE_TURBO,
ModelType.YI_VISION,
ModelType.YI_SPARK,
ModelType.YI_LARGE_RAG,
}:
return 16_384
elif self in {
ModelType.MISTRAL_CODESTRAL,
ModelType.MISTRAL_7B,
ModelType.MISTRAL_MIXTRAL_8x7B,
ModelType.GROQ_MIXTRAL_8_7B,
ModelType.YI_LARGE,
ModelType.YI_LARGE_FC,
}:
return 32_768
elif self in {ModelType.MISTRAL_MIXTRAL_8x22B}:
Expand Down Expand Up @@ -290,6 +328,7 @@ def token_limit(self) -> int:
ModelType.CLAUDE_3_SONNET,
ModelType.CLAUDE_3_HAIKU,
ModelType.CLAUDE_3_5_SONNET,
ModelType.YI_MEDIUM_200K,
}:
return 200_000
elif self in {
Expand Down Expand Up @@ -445,6 +484,7 @@ class ModelPlatformType(Enum):
TOGETHER = "together"
OPENAI_COMPATIBLE_MODEL = "openai-compatible-model"
SAMBA = "samba-nova"
YI = "lingyiwanwu"

@property
def is_openai(self) -> bool:
Expand Down Expand Up @@ -517,6 +557,11 @@ def is_samba(self) -> bool:
r"""Returns whether this platform is Samba Nova."""
return self is ModelPlatformType.SAMBA

@property
def is_yi(self) -> bool:
r"""Returns whether this platform is Yi."""
return self is ModelPlatformType.YI


class AudioModelType(Enum):
TTS_1 = "tts-1"
Expand Down
5 changes: 5 additions & 0 deletions camel/types/unified_model_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,11 @@ def is_reka(self) -> bool:
r"""Returns whether the model is a Reka model."""
return True

@property
def is_yi(self) -> bool:
r"""Returns whether the model is a Yi model."""
return True

@property
def support_native_tool_calling(self) -> bool:
r"""Returns whether the model supports native tool calling."""
Expand Down
9 changes: 9 additions & 0 deletions docs/key_modules/models.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,15 @@ The following table lists currently supported model platforms by CAMEL.
| Anthropic | claude-2.0 | N |
| Gemini | gemini-1.5-pro | Y |
| Gemini | gemini-1.5-flash | Y |
| Lingyiwanwu | yi-lightning | N |
| Lingyiwanwu | yi-large | N |
| Lingyiwanwu | yi-medium | N |
| Lingyiwanwu | yi-large-turbo | N |
| Lingyiwanwu | yi-vision | Y |
| Lingyiwanwu | yi-medium-200k | N |
| Lingyiwanwu | yi-spark | N |
| Lingyiwanwu | yi-large-rag | N |
| Lingyiwanwu | yi-large-fc | N |
| ZhipuAI | glm-4v | Y |
| ZhipuAI | glm-4 | N |
| ZhipuAI | glm-3-turbo | N |
Expand Down
13 changes: 3 additions & 10 deletions examples/models/azure_openai_model_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from camel.agents import ChatAgent
from camel.configs import ChatGPTConfig
from camel.messages import BaseMessage
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

Expand All @@ -32,19 +31,13 @@
)

# Define system message
sys_msg = BaseMessage.make_assistant_message(
role_name="Assistant",
content="You are a helpful assistant.",
)
sys_msg = "You are a helpful assistant."

# Set agent
camel_agent = ChatAgent(system_message=sys_msg, model=model)

user_msg = BaseMessage.make_user_message(
role_name="User",
content="""Say hi to CAMEL AI, one open-source community dedicated to the
study of autonomous and communicative agents.""",
)
user_msg = """Say hi to CAMEL AI, one open-source community dedicated to the
study of autonomous and communicative agents."""

# Get response information
response = camel_agent.step(user_msg)
Expand Down
Loading

0 comments on commit aac3fa2

Please sign in to comment.