Skip to content

Commit

Permalink
Merge branch 'master' into feat/runtime
Browse files Browse the repository at this point in the history
  • Loading branch information
Wendong-Fan committed Nov 22, 2024
2 parents 75c29de + abfa406 commit 5f27670
Show file tree
Hide file tree
Showing 28 changed files with 1,743 additions and 713 deletions.
78 changes: 78 additions & 0 deletions .env
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
# To use these environment variables:
# 1. Populate the .env file with your API keys.
# 2. Include the following code snippet in your Python script:
# from dotenv import load_dotenv
# import os
#
# load_dotenv() # Load environment variables from .env file

#===========================================
# Models API
#===========================================

# OpenAI API (https://platform.openai.com/signup)
OPENAI_API_KEY="Fill your API key here"

# Anthropic API (https://www.anthropic.com/)
ANTHROPIC_API_KEY="Fill your API key here"

# Groq API (https://groq.com/)
GROQ_API_KEY="Fill your API key here"

# Cohere API (https://cohere.ai/)
COHERE_API_KEY="Fill your API key here"

# Hugging Face API (https://huggingface.co/join)
HF_TOKEN="Fill your API key here"

# Azure OpenAI API (https://azure.microsoft.com/products/cognitive-services/openai-service/)
AZURE_OPENAI_API_KEY="Fill your API key here"
AZURE_API_VERSION="Fill your API Version here"
AZURE_DEPLOYMENT_NAME="Fill your Deployment Name here"
AZURE_OPENAI_BASE_URL="Fill your Base URL here"

# Mistral API (https://mistral.ai/)
MISTRAL_API_KEY="Fill your API key here"

# Reka API (https://www.reka.ai/)
REKA_API_KEY="Fill your API key here"

# Zhipu AI API (https://www.zhipu.ai/)
ZHIPUAI_API_KEY="Fill your API key here"
ZHIPUAI_API_BASE_URL="Fill your Base URL here"

# Qwen API (https://help.aliyun.com/document_detail/611472.html)
QWEN_API_KEY="Fill your API key here"

# LingYi API (https://platform.lingyiwanwu.com/apikeys)
YI_API_KEY="Fill your API key here"

#===========================================
# Tools & Services API
#===========================================

# Google Search API (https://developers.google.com/custom-search/v1/overview)
GOOGLE_API_KEY="Fill your API key here"
SEARCH_ENGINE_ID="Fill your Search Engine ID here"

# OpenWeatherMap API (https://home.openweathermap.org/users/sign_up)
OPENWEATHERMAP_API_KEY="Fill your API key here"

# NVIDIA API (https://build.nvidia.com/explore/discover)
NVIDIA_API_BASE_URL="Fill your Base URL here"
NVIDIA_API_KEY="Fill your API key here"

# Neo4j Database (https://neo4j.com/)
NEO4J_URI="Fill your URI here"
NEO4J_USERNAME="Fill your User Name here"
NEO4J_PASSWORD="Fill your Password here"

# Firecrawl API (https://www.firecrawl.dev/)
FIRECRAWL_API_KEY="Fill your API key here"

# AskNews API (https://docs.asknews.app/en/reference)
ASKNEWS_CLIENT_ID="Fill your Client ID here"
ASKNEWS_CLIENT_SECRET="Fill your Client Secret here"

# Chunkr API (https://chunkr.ai/)
CHUNKR_API_KEY="Fill your API key here"
1 change: 0 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,6 @@ celerybeat.pid
*.sage.py

# Environments
.env
.venv
env/
venv/
Expand Down
31 changes: 31 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -207,6 +207,37 @@ Replace `<insert your OpenAI API key>` with your actual OpenAI API key in each c

Please note that the environment variable is session-specific. If you open a new terminal window or tab, you will need to set the API key again in that new session.

**For `.env` File:**

To simplify the process of managing API keys, you can store them in a `.env` file and load them into your application dynamically.

1. Modify the `.env` file in the root directory of CAMEL and fill in the following lines:

```bash
OPENAI_API_KEY=<fill your API KEY here>
```

Replace <fill your API KEY here> with your actual API key.

2. Load the .env file in your Python script: Use the load_dotenv() function from the dotenv module to load the variables from the .env file into the environment. Here's an example:
```python
from dotenv import load_dotenv
import os
# Load environment variables from the .env file
load_dotenv()
```
For more details about the key names used in the project and how to obtain the keys,
you can refer to the [`.env` example file](https://github.com/camel-ai/camel/blob/master/.env).
> [!TIP]
> By default, the load_dotenv() function does not overwrite existing environment variables that are already set in your system. It only populates variables that are missing.
If you need to overwrite existing environment variables with the values from your `.env` file, use the `override=True` parameter:
> ```python
> load_dotenv(override=True)
> ```
After setting the OpenAI API key, you can run the `role_playing.py` script. Find tasks for various assistant-user roles [here](https://drive.google.com/file/d/194PPaSTBR07m-PzjS-Ty6KlPLdFIPQDd/view?usp=share_link).
```bash
Expand Down
9 changes: 9 additions & 0 deletions camel/agents/chat_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -476,6 +476,15 @@ def step(
a boolean indicating whether the chat session has terminated,
and information about the chat session.
"""
if (
self.model_backend.model_config_dict.get("response_format")
and response_format
):
raise ValueError(
"The `response_format` parameter cannot be set both in "
"the model configuration and in the ChatAgent step."
)

if isinstance(input_message, str):
input_message = BaseMessage.make_user_message(
role_name='User', content=input_message
Expand Down
3 changes: 3 additions & 0 deletions camel/configs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from .anthropic_config import ANTHROPIC_API_PARAMS, AnthropicConfig
from .base_config import BaseConfig
from .cohere_config import COHERE_API_PARAMS, CohereConfig
from .gemini_config import Gemini_API_PARAMS, GeminiConfig
from .groq_config import GROQ_API_PARAMS, GroqConfig
from .litellm_config import LITELLM_API_PARAMS, LiteLLMConfig
Expand Down Expand Up @@ -60,6 +61,8 @@
'SAMBA_CLOUD_API_PARAMS',
'TogetherAIConfig',
'TOGETHERAI_API_PARAMS',
'CohereConfig',
'COHERE_API_PARAMS',
'YiConfig',
'YI_API_PARAMS',
'QwenConfig',
Expand Down
76 changes: 76 additions & 0 deletions camel/configs/cohere_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from __future__ import annotations

from typing import List, Optional

from camel.configs.base_config import BaseConfig


class CohereConfig(BaseConfig):
    r"""Defines the parameters for generating chat completions using the
    Cohere API.

    Args:
        temperature (float, optional): Sampling temperature to use, between
            :obj:`0` and :obj:`2`. Higher values make the output more random,
            while lower values make it more focused and deterministic.
            (default: :obj:`0.2`)
        documents (list, optional): A list of relevant documents that the
            model can cite to generate a more accurate reply. Each document is
            either a string or document object with content and metadata.
            (default: :obj:`None`)
        max_tokens (int, optional): The maximum number of tokens the model
            will generate as part of the response. (default: :obj:`None`)
        stop_sequences (List[str], optional): A list of up to 5 strings that
            the model will use to stop generation. If the model generates a
            string that matches any of the strings in the list, it will stop
            generating tokens and return the generated text up to that point
            not including the stop sequence. (default: :obj:`None`)
        seed (int, optional): If specified, the backend will make a best
            effort to sample tokens deterministically, such that repeated
            requests with the same seed and parameters should return the same
            result. However, determinism cannot be totally guaranteed.
            (default: :obj:`None`)
        frequency_penalty (float, optional): Min value of `0.0`, max value of
            `1.0`. Used to reduce repetitiveness of generated tokens. The
            higher the value, the stronger a penalty is applied to previously
            present tokens, proportional to how many times they have already
            appeared in the prompt or prior generation. (default: :obj:`0.0`)
        presence_penalty (float, optional): Min value of `0.0`, max value of
            `1.0`. Used to reduce repetitiveness of generated tokens. Similar
            to `frequency_penalty`, except that this penalty is applied
            equally to all tokens that have already appeared, regardless of
            their exact frequencies. (default: :obj:`0.0`)
        k (int, optional): Ensures only the top k most likely tokens are
            considered for generation at each step. Min value of `0`, max
            value of `500`. (default: :obj:`0`)
        p (float, optional): Ensures that only the most likely tokens, with
            total probability mass of `p`, are considered for generation at
            each step. If both k and p are enabled, `p` acts after `k`. Min
            value of `0.01`, max value of `0.99`. (default: :obj:`0.75`)
    """

    # NOTE(review): an earlier docstring stated a default of 0.3 while the
    # field default is 0.2 — confirm which value is intended.
    temperature: Optional[float] = 0.2
    documents: Optional[list] = None
    max_tokens: Optional[int] = None
    stop_sequences: Optional[List[str]] = None
    seed: Optional[int] = None
    frequency_penalty: Optional[float] = 0.0
    presence_penalty: Optional[float] = 0.0
    k: Optional[int] = 0
    p: Optional[float] = 0.75


# Names of every CohereConfig field; used to filter which kwargs may be
# forwarded to the Cohere API.  `model_fields` is read on the class rather
# than on a throwaway instance: instance-level access is deprecated in newer
# Pydantic versions and needlessly constructs a config object.  This also
# matches how OPENAI_API_PARAMS is built from ChatGPTConfig.
COHERE_API_PARAMS = set(CohereConfig.model_fields)
31 changes: 28 additions & 3 deletions camel/configs/openai_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
from __future__ import annotations

from typing import Optional, Sequence, Union
from typing import Any, Optional, Sequence, Type, Union

from pydantic import Field
from pydantic import BaseModel, Field

from camel.configs.base_config import BaseConfig
from camel.types import NOT_GIVEN, NotGiven
Expand Down Expand Up @@ -104,11 +104,36 @@ class ChatGPTConfig(BaseConfig):
stop: Union[str, Sequence[str], NotGiven] = NOT_GIVEN
max_tokens: Union[int, NotGiven] = NOT_GIVEN
presence_penalty: float = 0.0
response_format: Union[dict, NotGiven] = NOT_GIVEN
response_format: Union[Type[BaseModel], dict, NotGiven] = NOT_GIVEN
frequency_penalty: float = 0.0
logit_bias: dict = Field(default_factory=dict)
user: str = ""
tool_choice: Optional[Union[dict[str, str], str]] = None

def as_dict(self) -> dict[str, Any]:
    r"""Convert the current configuration to a dictionary.

    This method converts the current configuration object to a dictionary
    representation, which can be used for serialization or other purposes.

    If `tools` is set, each entry is validated to be a `FunctionTool`
    before the dictionary is returned.

    Returns:
        dict[str, Any]: A dictionary representation of the current
            configuration.

    Raises:
        ValueError: If any entry in `self.tools` is not a `FunctionTool`
            instance.
    """
    config_dict = self.model_dump()
    if self.tools:
        # Imported here rather than at module level, presumably to avoid a
        # circular import between configs and toolkits — TODO confirm.
        from camel.toolkits import FunctionTool

        tools_schema = []
        for tool in self.tools:
            if not isinstance(tool, FunctionTool):
                raise ValueError(
                    f"The tool {tool} should "
                    "be an instance of `FunctionTool`."
                )
            tools_schema.append(tool.get_openai_tool_schema())
    # NOTE(review): `tools_schema` is built above but never stored, and
    # `tools` is unconditionally reset to NOT_GIVEN here, so the loop acts
    # only as type validation.  Confirm whether `tools_schema` was meant to
    # be written into `config_dict["tools"]` instead.
    config_dict["tools"] = NOT_GIVEN
    return config_dict


# Names of every ChatGPTConfig field; used to filter which kwargs may be
# forwarded to the OpenAI chat-completions API.  `set(...)` over the
# `model_fields` mapping is equivalent to (and simpler than) a set
# comprehension over `.keys()`.
OPENAI_API_PARAMS = set(ChatGPTConfig.model_fields)
2 changes: 2 additions & 0 deletions camel/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from .anthropic_model import AnthropicModel
from .azure_openai_model import AzureOpenAIModel
from .base_model import BaseModelBackend
from .cohere_model import CohereModel
from .gemini_model import GeminiModel
from .groq_model import GroqModel
from .litellm_model import LiteLLMModel
Expand Down Expand Up @@ -42,6 +43,7 @@
'GroqModel',
'StubModel',
'ZhipuAIModel',
'CohereModel',
'ModelFactory',
'LiteLLMModel',
'OpenAIAudioModels',
Expand Down
Loading

0 comments on commit 5f27670

Please sign in to comment.