Adding support for RPM
joaomdmoura committed Jan 14, 2024
1 parent 1954be4 commit 258a8d6
Showing 11 changed files with 2,267 additions and 83 deletions.
39 changes: 32 additions & 7 deletions src/crewai/agent.py
@@ -24,8 +24,7 @@
CrewAgentOutputParser,
ToolsHandler,
)
from crewai.i18n import I18N
from crewai.prompts import Prompts
from crewai.utilities import I18N, Logger, Prompts, RPMController


class Agent(BaseModel):
@@ -42,11 +41,14 @@ class Agent(BaseModel):
llm: The language model that will run the agent.
max_iter: Maximum number of iterations for an agent to execute a task.
memory: Whether the agent should have memory or not.
max_rpm: Maximum number of requests per minute for the agent execution to be respected.
verbose: Whether the agent execution should be in verbose mode.
allow_delegation: Whether the agent is allowed to delegate tasks to other agents.
"""

__hash__ = object.__hash__
_logger: Logger = PrivateAttr()
_rpm_controller: RPMController = PrivateAttr(default=None)
_request_within_rpm_limit: Any = PrivateAttr(default=None)

model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -58,6 +60,10 @@ class Agent(BaseModel):
role: str = Field(description="Role of the agent")
goal: str = Field(description="Objective of the agent")
backstory: str = Field(description="Backstory of the agent")
max_rpm: Optional[int] = Field(
default=None,
description="Maximum number of requests per minute for the agent execution to be respected.",
)
memory: bool = Field(
default=True, description="Whether the agent should have memory or not"
)
@@ -101,6 +107,15 @@ def _deny_user_set_id(cls, v: Optional[UUID4]) -> None:
"may_not_set_field", "This field is not to be set by the user.", {}
)

@model_validator(mode="after")
def set_private_attrs(self):
self._logger = Logger(self.verbose)
if self.max_rpm and not self._rpm_controller:
self._rpm_controller = RPMController(
max_rpm=self.max_rpm, logger=self._logger
)
return self

@model_validator(mode="after")
def check_agent_executor(self) -> "Agent":
if not self.agent_executor:
@@ -128,7 +143,7 @@ def execute_task(
tools = tools or self.tools
self.agent_executor.tools = tools

return self.agent_executor.invoke(
result = self.agent_executor.invoke(
{
"input": task,
"tool_names": self.__tools_names(tools),
@@ -137,14 +152,20 @@
RunnableConfig(callbacks=[self.tools_handler]),
)["output"]

if self.max_rpm:
self._rpm_controller.stop_rpm_counter()

return result

def set_cache_handler(self, cache_handler) -> None:
self.cache_handler = cache_handler
self.tools_handler = ToolsHandler(cache=self.cache_handler)
self.__create_agent_executor()

def set_request_within_rpm_limit(self, ensure_function) -> None:
self._request_within_rpm_limit = ensure_function
self.__create_agent_executor()
def set_rpm_controller(self, rpm_controller) -> None:
if not self._rpm_controller:
self._rpm_controller = rpm_controller
self.__create_agent_executor()

def __create_agent_executor(self) -> CrewAgentExecutor:
"""Create an agent executor for the agent.
@@ -164,9 +185,13 @@ def __create_agent_executor(self) -> CrewAgentExecutor:
"verbose": self.verbose,
"handle_parsing_errors": True,
"max_iterations": self.max_iter,
"request_within_rpm_limit": self._request_within_rpm_limit,
}

if self._rpm_controller:
executor_args[
"request_within_rpm_limit"
] = self._rpm_controller.check_or_wait

if self.memory:
summary_memory = ConversationSummaryMemory(
llm=self.llm, input_key="input", memory_key="chat_history"
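As a usage sketch of the new agent-level throttle (the role, goal, and backstory strings below are illustrative, and the top-level import path is assumed rather than shown in this commit): an agent created with max_rpm pauses its executor once the per-minute request cap is hit, instead of failing.

from crewai import Agent  # assumed public import path

# Illustrative only: cap this agent at 10 requests per minute.
# When the cap is reached, the RPMController wired up in
# set_private_attrs() makes the executor wait for the next
# minute window before issuing further requests.
researcher = Agent(
    role="Researcher",
    goal="Summarize recent AI papers",
    backstory="An analyst who digests research quickly.",
    max_rpm=10,
    verbose=True,
)
output = researcher.execute_task("List three notable LLM papers from 2023.")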
2 changes: 1 addition & 1 deletion src/crewai/agents/exceptions.py
@@ -1,6 +1,6 @@
from langchain_core.exceptions import OutputParserException

from crewai.i18n import I18N
from crewai.utilities import I18N


class TaskRepeatedUsageException(OutputParserException):
2 changes: 1 addition & 1 deletion src/crewai/agents/executor.py
@@ -12,8 +12,8 @@
from langchain_core.utils.input import get_color_mapping

from crewai.agents.cache.cache_hit import CacheHit
from crewai.i18n import I18N
from crewai.tools.cache_tools import CacheTools
from crewai.utilities import I18N


class CrewAgentExecutor(AgentExecutor):
2 changes: 1 addition & 1 deletion src/crewai/agents/output_parser.py
@@ -7,7 +7,7 @@
from crewai.agents.cache import CacheHandler, CacheHit
from crewai.agents.exceptions import TaskRepeatedUsageException
from crewai.agents.tools_handler import ToolsHandler
from crewai.i18n import I18N
from crewai.utilities import I18N

FINAL_ANSWER_ACTION = "Final Answer:"
FINAL_ANSWER_AND_PARSABLE_ACTION_ERROR_MESSAGE = (
82 changes: 25 additions & 57 deletions src/crewai/crew.py
@@ -1,8 +1,6 @@
import json
import threading
import time
import uuid
from typing import Any, ClassVar, Dict, List, Optional, Union
from typing import Any, Dict, List, Optional, Union

from pydantic import (
UUID4,
@@ -19,10 +17,10 @@

from crewai.agent import Agent
from crewai.agents.cache import CacheHandler
from crewai.i18n import I18N
from crewai.process import Process
from crewai.task import Task
from crewai.tools.agent_tools import AgentTools
from crewai.utilities import I18N, Logger, RPMController


class Crew(BaseModel):
@@ -37,23 +35,26 @@ class Crew(BaseModel):
config: Configuration settings for the crew.
cache_handler: Handles caching for the crew's operations.
max_rpm: Maximum number of requests per minute for the crew execution to be respected.
rpm: Current number of requests per minute for the crew execution.
id: A unique identifier for the crew instance.
"""

__hash__ = object.__hash__
_timer: Optional[threading.Timer] = PrivateAttr(default=None)
lock: ClassVar[threading.Lock] = threading.Lock()
rpm: ClassVar[int] = 0
max_rpm: Optional[int] = Field(default=None)
_rpm_controller: RPMController = PrivateAttr()
_logger: Logger = PrivateAttr()
_cache_handler: Optional[InstanceOf[CacheHandler]] = PrivateAttr(
default=CacheHandler()
)
model_config = ConfigDict(arbitrary_types_allowed=True)
tasks: List[Task] = Field(default_factory=list)
agents: List[Agent] = Field(default_factory=list)
process: Process = Field(default=Process.sequential)
verbose: Union[int, bool] = Field(default=0)
config: Optional[Union[Json, Dict[str, Any]]] = Field(default=None)
cache_handler: Optional[InstanceOf[CacheHandler]] = Field(default=CacheHandler())
id: UUID4 = Field(default_factory=uuid.uuid4, frozen=True)
max_rpm: Optional[int] = Field(
default=None,
description="Maximum number of requests per minute for the crew execution to be respected.",
)
language: str = Field(
default="en",
description="Language used for the crew, defaults to English.",
@@ -74,9 +75,10 @@ def check_config_type(cls, v: Union[Json, Dict[str, Any]]):
return json.loads(v) if isinstance(v, Json) else v

@model_validator(mode="after")
def set_reset_counter(self):
if self.max_rpm:
self._reset_request_count()
def set_private_attrs(self):
self._cache_handler = CacheHandler()
self._logger = Logger(self.verbose)
self._rpm_controller = RPMController(max_rpm=self.max_rpm, logger=self._logger)
return self

@model_validator(mode="after")
@@ -94,8 +96,8 @@ def check_config(self):

if self.agents:
for agent in self.agents:
agent.set_cache_handler(self.cache_handler)
agent.set_request_within_rpm_limit(self.ensure_request_within_rpm_limit)
agent.set_cache_handler(self._cache_handler)
agent.set_rpm_controller(self._rpm_controller)
return self

def _setup_from_config(self):
@@ -116,28 +118,9 @@ def _create_task(self, task_config):
del task_config["agent"]
return Task(**task_config, agent=task_agent)

def ensure_request_within_rpm_limit(self):
if not self.max_rpm:
return True

with Crew.lock:
if Crew.rpm < self.max_rpm:
Crew.rpm += 1
return True
self._log("info", "Max RPM reached, waiting for next minute to start.")

return self._wait_for_next_minute()

def _wait_for_next_minute(self):
time.sleep(60)
with Crew.lock:
Crew.rpm = 0
return True

def kickoff(self) -> str:
"""Starts the crew to work on its assigned tasks."""
for agent in self.agents:
agent.cache_handler = self.cache_handler
agent.i18n = I18N(language=self.language)

if self.process == Process.sequential:
@@ -149,33 +132,18 @@ def _sequential_loop(self) -> str:
for task in self.tasks:
self._prepare_and_execute_task(task)
task_output = task.execute(task_output)
self._log("debug", f"\n[{task.agent.role}] Task output: {task_output}\n\n")
self._stop_timer()
self._logger.log(
"debug", f"[{task.agent.role}] Task output: {task_output}\n\n"
)

if self.max_rpm:
self._rpm_controller.stop_rpm_counter()
return task_output

def _prepare_and_execute_task(self, task):
"""Prepares and logs information about the task being executed."""
if task.agent.allow_delegation:
task.tools += AgentTools(agents=self.agents).tools()

self._log("debug", f"Working Agent: {task.agent.role}")
self._log("info", f"Starting Task: {task.description}")

def _log(self, level, message):
"""Logs a message at the specified verbosity level."""
level_map = {"debug": 1, "info": 2}
verbose_level = (
2 if isinstance(self.verbose, bool) and self.verbose else self.verbose
)
if verbose_level and level_map[level] <= verbose_level:
print(f"\n{message}")

def _stop_timer(self):
if self._timer:
self._timer.cancel()

def _reset_request_count(self):
self._stop_timer()
self._timer = threading.Timer(60.0, self._reset_request_count)
self._timer.start()
Crew.rpm = 0
self._logger.log("debug", f"Working Agent: {task.agent.role}")
self._logger.log("info", f"Starting Task: {task.description}")
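
The per-crew throttling that kickoff() previously relied on (the class-level lock, the shared rpm counter, _wait_for_next_minute, and the 60-second reset timer removed above) now lives in the shared RPMController from crewai.utilities, whose file is not expanded in this view. Below is a rough sketch of what such a controller might look like, inferred from the removed code and from the check_or_wait / stop_rpm_counter calls in the diff; the details are assumptions, not the actual source.

import threading
import time


class RPMController:
    # Illustrative sketch only: the real RPMController is defined in
    # src/crewai/utilities, which is not expanded in this commit view.
    # It centralizes the counter/lock/timer logic removed from Crew above.

    def __init__(self, max_rpm=None, logger=None):
        self.max_rpm = max_rpm
        self.logger = logger
        self._current_rpm = 0
        self._lock = threading.Lock()
        self._timer = None
        if self.max_rpm:
            self._reset_request_count()

    def check_or_wait(self):
        # Called by the agent executor before each request.
        if not self.max_rpm:
            return True
        with self._lock:
            if self._current_rpm < self.max_rpm:
                self._current_rpm += 1
                return True
        if self.logger:
            self.logger.log("info", "Max RPM reached, waiting for next minute to start.")
        time.sleep(60)
        with self._lock:
            self._current_rpm = 0
        return True

    def stop_rpm_counter(self):
        # Cancel the background reset timer once execution finishes.
        if self._timer:
            self._timer.cancel()
            self._timer = None

    def _reset_request_count(self):
        # Zero the counter every 60 seconds, mirroring the timer the
        # old Crew._reset_request_count kept on the class.
        with self._lock:
            self._current_rpm = 0
        self.stop_rpm_counter()
        self._timer = threading.Timer(60.0, self._reset_request_count)
        self._timer.start()

Moving the counter off Crew as a ClassVar and into an injected controller also means each crew (and each agent with its own max_rpm) gets an independent limit, instead of one process-wide counter shared by every Crew instance.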
2 changes: 1 addition & 1 deletion src/crewai/tools/agent_tools.py
@@ -4,7 +4,7 @@
from pydantic import BaseModel, Field

from crewai.agent import Agent
from crewai.i18n import I18N
from crewai.utilities import I18N


class AgentTools(BaseModel):
(Remaining changed files not expanded in this view.)
