Commit

update models.md, add yi_model_example
MuggleJinx committed Oct 30, 2024
1 parent f7f699e commit 638b971
Showing 3 changed files with 59 additions and 14 deletions.
17 changes: 3 additions & 14 deletions camel/utils/token_counting.py
@@ -407,7 +407,7 @@ def _convert_response_from_openai_to_mistral(
return mistral_request


# The API does not provide official token counting for Yi models, using the default "cl100k_base" encoding.
# The API does not provide official token counting for Yi models, so the default OpenAI tokenizer is used.
class YiTokenCounter(BaseTokenCounter):
def __init__(self, model_type: UnifiedModelType):
r"""Constructor for the token counter for Yi models.
@@ -416,8 +416,7 @@ def __init__(self, model_type: UnifiedModelType):
model_type (UnifiedModelType): Model type for which tokens will be
counted.
"""
        self.model_type = model_type
        self.encoding = "cl100k_base"
        self._internal_tokenizer = OpenAITokenCounter(model_type)

def count_tokens_from_messages(self, messages: List[OpenAIMessage]) -> int:
r"""Count number of tokens in the provided message list using
@@ -430,14 +429,4 @@ def count_tokens_from_messages(self, messages: List[OpenAIMessage]) -> int:
Returns:
int: Number of tokens in the messages.
"""
        num_tokens = 0
        for message in messages:
            num_tokens += 4  # every message follows <im_start>{role/name}\n{content}<im_end>\n
            for key, value in message.items():
                num_tokens += len(self.encoding.encode(str(value)))
                if key == "name":  # if there's a name, the role is omitted
                    num_tokens -= (
                        1  # role is always required and always 1 token
                    )
        num_tokens += 2  # every reply is primed with <im_start>assistant
        return num_tokens
        return self._internal_tokenizer.count_tokens_from_messages(messages)
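
For reference, a minimal sketch of how the refactored counter might be exercised. Only the class and method names come from the diff above; the import location of UnifiedModelType and constructing it directly from a ModelType are assumptions.

from camel.types import ModelType, UnifiedModelType
from camel.utils.token_counting import YiTokenCounter

# After this commit the Yi counter delegates to OpenAITokenCounter internally.
counter = YiTokenCounter(UnifiedModelType(ModelType.YI_LIGHTNING))
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Say hi to CAMEL AI."},
]
print(counter.count_tokens_from_messages(messages))  # total token count

Delegating to OpenAITokenCounter avoids duplicating the hand-written message-framing heuristics that this commit removes.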
4 changes: 4 additions & 0 deletions docs/key_modules/models.md
@@ -41,6 +41,10 @@ The following table lists currently supported model platforms by CAMEL.
| Anthropic | claude-2.0 | N |
| Gemini | gemini-1.5-pro | Y |
| Gemini | gemini-1.5-flash | Y |
| Lingyiwanwu | yi-lightning | N |
| Lingyiwanwu | yi-large | N |
| Lingyiwanwu | yi-medium | N |
| Lingyiwanwu | yi-large-turbo | N |
| ZhipuAI | glm-4v | Y |
| ZhipuAI | glm-4 | N |
| ZhipuAI | glm-3-turbo | N |
52 changes: 52 additions & 0 deletions examples/models/yi_model_example.py
@@ -0,0 +1,52 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

from camel.agents import ChatAgent
from camel.configs import YiConfig
from camel.messages import BaseMessage
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

model = ModelFactory.create(
    model_platform=ModelPlatformType.YI,
    model_type=ModelType.YI_LIGHTNING,
    model_config_dict=YiConfig(temperature=0.2).as_dict(),
)

# Define system message
sys_msg = BaseMessage.make_assistant_message(
role_name="Assistant",
content="You are a helpful assistant.",
)

# Set agent
camel_agent = ChatAgent(system_message=sys_msg, model=model)

user_msg = BaseMessage.make_user_message(
role_name="User",
content="""Say hi to CAMEL AI, one open-source community
dedicated to the study of autonomous and communicative agents.""",
)

# Get response information
response = camel_agent.step(user_msg)
print(response.msgs[0].content)
'''
===============================================================================
Hello CAMEL AI community! 👋 It's great to connect with an open-source group
dedicated to the fascinating fields of autonomous and communicative agents. If
there's anything you need assistance with or any interesting projects you're
working on, feel free to share. I'm here to help however I can! 😊
===============================================================================
'''
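
Running this example presumably requires a Lingyiwanwu API key to be available to the Yi backend; the commit does not show how the key is supplied. A hedged sketch, assuming the backend reads it from an environment variable named YI_API_KEY (the variable name is an assumption):

import os

# Assumption: the Yi model backend reads the key from YI_API_KEY.
os.environ["YI_API_KEY"] = "your-api-key"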
