
Commit

Merge branch 'main' into update_userproxy_func
skzhang1 committed Feb 26, 2024
2 parents 42cb1fb + 8ec1c3e commit 5a6ad67
Showing 37 changed files with 1,129 additions and 130 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/build.yml
@@ -42,8 +42,13 @@ jobs:
          pip install -e .
          python -c "import autogen"
          pip install pytest mock
      - name: Install optional dependencies for code executors
        # code executors auto skip without deps, so only run for python 3.11
        if: matrix.python-version == '3.11'
        run: |
          pip install jupyter-client ipykernel
          python -m ipykernel install --user --name python3
          pip install -e ".[local-jupyter-exec]"
      - name: Set AUTOGEN_USE_DOCKER based on OS
        shell: bash
        run: |
2 changes: 1 addition & 1 deletion autogen/agentchat/contrib/capabilities/context_handling.py
@@ -46,7 +46,7 @@ def add_to_agent(self, agent: ConversableAgent):
"""
Adds TransformChatHistory capability to the given agent.
"""
agent.register_hook(hookable_method="process_all_messages", hook=self._transform_messages)
agent.register_hook(hookable_method="process_all_messages_before_reply", hook=self._transform_messages)

def _transform_messages(self, messages: List[Dict]) -> List[Dict]:
"""
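For illustration, a minimal sketch of registering a capability hook under the renamed method. The agent and the transform function below are hypothetical, not part of this commit; only the hook name comes from the diff.

from typing import Dict, List

from autogen import ConversableAgent

def keep_last_three(messages: List[Dict]) -> List[Dict]:
    # Hypothetical transform: keep only the three most recent messages.
    return messages[-3:]

agent = ConversableAgent(name="assistant", llm_config=False)
agent.register_hook(
    hookable_method="process_all_messages_before_reply",  # renamed from "process_all_messages"
    hook=keep_last_three,
)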
4 changes: 2 additions & 2 deletions autogen/agentchat/contrib/capabilities/teachability.py
@@ -61,7 +61,7 @@ def add_to_agent(self, agent: ConversableAgent):
self.teachable_agent = agent

# Register a hook for processing the last message.
agent.register_hook(hookable_method="process_last_message", hook=self.process_last_message)
agent.register_hook(hookable_method="process_last_received_message", hook=self.process_last_received_message)

# Was an llm_config passed to the constructor?
if self.llm_config is None:
@@ -82,7 +82,7 @@ def prepopulate_db(self):
"""Adds a few arbitrary memos to the DB."""
self.memo_store.prepopulate()

def process_last_message(self, text):
def process_last_received_message(self, text):
"""
Appends any relevant memos to the message text, and stores any apparent teachings in new memos.
Uses TextAnalyzerAgent to make decisions about memo storage and retrieval.
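The same renaming applies on the receiving side. A sketch with a hypothetical hook (only the hook name, "process_last_received_message", comes from this commit); hooks for this method receive and return the text of the last message:

from autogen import ConversableAgent

def add_reminder(text: str) -> str:
    # Hypothetical hook: annotate the text of the last received message.
    return text + "\n(Consider any stored memos before answering.)"

agent = ConversableAgent(name="student", llm_config=False)
agent.register_hook(hookable_method="process_last_received_message", hook=add_reminder)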
30 changes: 20 additions & 10 deletions autogen/agentchat/contrib/gpt_assistant_agent.py
@@ -3,6 +3,7 @@
import json
import time
import logging
import copy

from autogen import OpenAIWrapper
from autogen.oai.openai_utils import retrieve_assistants_by_name
@@ -52,12 +53,26 @@ def __init__(
- verbose (bool): If set to True, enables more detailed output from the assistant thread.
- Other kwargs: Except verbose, others are passed directly to ConversableAgent.
"""

self._verbose = kwargs.pop("verbose", False)
super().__init__(
name=name, system_message=instructions, human_input_mode="NEVER", llm_config=llm_config, **kwargs
)

if llm_config is False:
raise ValueError("llm_config=False is not supported for GPTAssistantAgent.")

# Use AutoGen OpenAIWrapper to create a client
openai_client_cfg = None
model_name = "gpt-4-1106-preview"
if llm_config and llm_config.get("config_list") is not None and len(llm_config["config_list"]) > 0:
openai_client_cfg = llm_config["config_list"][0].copy()
model_name = openai_client_cfg.pop("model", "gpt-4-1106-preview")
model_name = "gpt-4-0125-preview"
openai_client_cfg = copy.deepcopy(llm_config)
# GPTAssistantAgent's azure_deployment param may cause NotFoundError (404) in client.beta.assistants.list()
# See: https://github.com/microsoft/autogen/pull/1721
if openai_client_cfg.get("config_list") is not None and len(openai_client_cfg["config_list"]) > 0:
model_name = openai_client_cfg["config_list"][0].pop("model", "gpt-4-0125-preview")
else:
model_name = openai_client_cfg.pop("model", "gpt-4-0125-preview")

logger.warning("OpenAI client config of GPTAssistantAgent(%s) - model: %s", name, model_name)

oai_wrapper = OpenAIWrapper(**openai_client_cfg)
if len(oai_wrapper._clients) > 1:
@@ -143,11 +158,6 @@ def __init__(
# Tools are specified but overwrite_tools is False; do not update the assistant's tools
logger.warning("overwrite_tools is False. Using existing tools from assistant API.")

self._verbose = kwargs.pop("verbose", False)
super().__init__(
name=name, system_message=instructions, human_input_mode="NEVER", llm_config=llm_config, **kwargs
)

# lazily create threads
self._openai_threads = {}
self._unread_index = defaultdict(int)
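A construction sketch under the reordered initializer (the config values are placeholders; a valid OpenAI API key is needed to actually run this). Because the whole llm_config is now deep-copied, popping "model" no longer mutates the caller's dict:

from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

llm_config = {
    "config_list": [{"model": "gpt-4-0125-preview", "api_key": "sk-placeholder"}],
}
assistant = GPTAssistantAgent(
    name="coder",
    instructions="You write and explain Python code.",
    llm_config=llm_config,
)
# llm_config["config_list"][0]["model"] is still present here, thanks to the deep copy.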
33 changes: 24 additions & 9 deletions autogen/agentchat/conversable_agent.py
@@ -223,7 +223,11 @@ def __init__(

# Registered hooks are kept in lists, indexed by hookable method, to be called in their order of registration.
# New hookable methods should be added to this list as required to support new agent capabilities.
self.hook_lists = {"process_last_message": [], "process_all_messages": []}
self.hook_lists = {
"process_last_received_message": [],
"process_all_messages_before_reply": [],
"process_message_before_send": [],
}

@property
def name(self) -> str:
@@ -467,6 +471,15 @@ def _append_oai_message(self, message: Union[Dict, str], role, conversation_id:
self._oai_messages[conversation_id].append(oai_message)
return True

def _process_message_before_send(
self, message: Union[Dict, str], recipient: Agent, silent: bool
) -> Union[Dict, str]:
"""Process the message before sending it to the recipient."""
hook_list = self.hook_lists["process_message_before_send"]
for hook in hook_list:
message = hook(message, recipient, silent)
return message

def send(
self,
message: Union[Dict, str],
@@ -509,6 +522,7 @@ def send(
Returns:
ChatResult: a ChatResult object.
"""
message = self._process_message_before_send(message, recipient, silent)
# When the agent composes and sends the message, the role of the message is "assistant"
# unless it's "function".
valid = self._append_oai_message(message, "assistant", recipient)
@@ -561,6 +575,7 @@ async def a_send(
Returns:
ChatResult: a ChatResult object.
"""
message = self._process_message_before_send(message, recipient, silent)
# When the agent composes and sends the message, the role of the message is "assistant"
# unless it's "function".
valid = self._append_oai_message(message, "assistant", recipient)
@@ -1634,11 +1649,11 @@ def generate_reply(

# Call the hookable method that gives registered hooks a chance to process all messages.
# Message modifications do not affect the incoming messages or self._oai_messages.
messages = self.process_all_messages(messages)
messages = self.process_all_messages_before_reply(messages)

# Call the hookable method that gives registered hooks a chance to process the last message.
# Message modifications do not affect the incoming messages or self._oai_messages.
messages = self.process_last_message(messages)
messages = self.process_last_received_message(messages)

for reply_func_tuple in self._reply_func_list:
reply_func = reply_func_tuple["reply_func"]
@@ -1695,11 +1710,11 @@ async def a_generate_reply(

# Call the hookable method that gives registered hooks a chance to process all messages.
# Message modifications do not affect the incoming messages or self._oai_messages.
messages = self.process_all_messages(messages)
messages = self.process_all_messages_before_reply(messages)

# Call the hookable method that gives registered hooks a chance to process the last message.
# Message modifications do not affect the incoming messages or self._oai_messages.
messages = self.process_last_message(messages)
messages = self.process_last_received_message(messages)

for reply_func_tuple in self._reply_func_list:
reply_func = reply_func_tuple["reply_func"]
@@ -2336,11 +2351,11 @@ def register_hook(self, hookable_method: str, hook: Callable):
assert hook not in hook_list, f"{hook} is already registered as a hook."
hook_list.append(hook)

def process_all_messages(self, messages: List[Dict]) -> List[Dict]:
def process_all_messages_before_reply(self, messages: List[Dict]) -> List[Dict]:
"""
Calls any registered capability hooks to process all messages, potentially modifying the messages.
"""
hook_list = self.hook_lists["process_all_messages"]
hook_list = self.hook_lists["process_all_messages_before_reply"]
# If no hooks are registered, or if there are no messages to process, return the original message list.
if len(hook_list) == 0 or messages is None:
return messages
@@ -2351,14 +2366,14 @@ def process_all_messages(self, messages: List[Dict]) -> List[Dict]:
processed_messages = hook(processed_messages)
return processed_messages

def process_last_message(self, messages):
def process_last_received_message(self, messages):
"""
Calls any registered capability hooks to use and potentially modify the text of the last message,
as long as the last message is not a function call or exit command.
"""

# If any required condition is not met, return the original message list.
hook_list = self.hook_lists["process_last_message"]
hook_list = self.hook_lists["process_last_received_message"]
if len(hook_list) == 0:
return messages # No hooks registered.
if messages is None:
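The new "process_message_before_send" hook is called with the outgoing message, the recipient, and the silent flag, and must return the (possibly modified) message. A minimal sketch with a hypothetical hook body:

from typing import Dict, Union

from autogen import Agent, ConversableAgent

def stamp_outgoing(message: Union[Dict, str], recipient: Agent, silent: bool) -> Union[Dict, str]:
    # Hypothetical hook: tag outgoing string messages with the recipient's name.
    if isinstance(message, str):
        return f"[to {recipient.name}] {message}"
    return message

sender = ConversableAgent(name="sender", llm_config=False)
sender.register_hook(hookable_method="process_message_before_send", hook=stamp_outgoing)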
34 changes: 33 additions & 1 deletion autogen/agentchat/groupchat.py
@@ -58,7 +58,8 @@ class GroupChat:
Must be supplied if `allowed_or_disallowed_speaker_transitions` is not None.
- enable_clear_history: enable possibility to clear history of messages for agents manually by providing
"clear history" phrase in user prompt. This is experimental feature.
See description of `GroupChatManager.clear_agents_history` function for more info.
See description of GroupChatManager.clear_agents_history function for more info.
- send_introductions: send a round of introductions at the start of the group chat, so agents know who they can speak to (default: False)
"""

agents: List[Agent]
@@ -71,6 +72,7 @@ class GroupChat:
allowed_or_disallowed_speaker_transitions: Optional[Dict] = None
speaker_transitions_type: Optional[str] = None
enable_clear_history: Optional[bool] = False
send_introductions: Optional[bool] = False

_VALID_SPEAKER_SELECTION_METHODS = ["auto", "manual", "random", "round_robin"]
_VALID_SPEAKER_TRANSITIONS_TYPE = ["allowed", "disallowed", None]
@@ -229,6 +231,16 @@ def select_speaker_prompt(self, agents: Optional[List[Agent]] = None) -> str:
agents = self.agents
return f"Read the above conversation. Then select the next role from {[agent.name for agent in agents]} to play. Only return the role."

def introductions_msg(self, agents: Optional[List[Agent]] = None) -> str:
"""Return the system message for selecting the next speaker. This is always the *first* message in the context."""
if agents is None:
agents = self.agents

return f"""Hello everyone. We have assembled a great team today to answer questions and solve tasks. In attendance are:
{self._participant_roles(agents)}
"""

def manual_select_speaker(self, agents: Optional[List[Agent]] = None) -> Union[Agent, None]:
"""Manually select the next speaker."""
if agents is None:
@@ -535,6 +547,16 @@ def run_chat(
message = messages[-1]
speaker = sender
groupchat = config
send_introductions = getattr(groupchat, "send_introductions", False)

if send_introductions:
# Broadcast the intro
intro = groupchat.introductions_msg()
for agent in groupchat.agents:
self.send(intro, agent, request_reply=False, silent=True)
# NOTE: We do not also append to groupchat.messages,
# since groupchat handles its own introductions

if self.client_cache is not None:
for a in groupchat.agents:
a.previous_cache = a.client_cache
@@ -598,6 +620,16 @@ async def a_run_chat(
message = messages[-1]
speaker = sender
groupchat = config
send_introductions = getattr(groupchat, "send_introductions", False)

if send_introductions:
# Broadcast the intro
intro = groupchat.introductions_msg()
for agent in groupchat.agents:
await self.a_send(intro, agent, request_reply=False, silent=True)
# NOTE: We do not also append to groupchat.messages,
# since groupchat handles its own introductions

if self.client_cache is not None:
for a in groupchat.agents:
a.previous_cache = a.client_cache
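A sketch of opting in to the new flag (the agents and manager config are placeholders):

from autogen import ConversableAgent, GroupChat, GroupChatManager

alice = ConversableAgent(name="alice", llm_config=False)
bob = ConversableAgent(name="bob", llm_config=False)

# With send_introductions=True, every agent receives introductions_msg()
# once, silently, before the first speaker is selected.
groupchat = GroupChat(agents=[alice, bob], messages=[], send_introductions=True)
manager = GroupChatManager(groupchat=groupchat, llm_config=False)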
3 changes: 2 additions & 1 deletion autogen/code_utils.py
@@ -229,7 +229,8 @@ def get_powershell_command():
return "pwsh"

except FileNotFoundError:
print("Neither powershell nor pwsh is installed.")
if WIN32:
logging.warning("Neither powershell nor pwsh is installed but it is a Windows OS")
return None


9 changes: 9 additions & 0 deletions autogen/coding/base.py
@@ -92,3 +92,12 @@ def restart(self) -> None:
This method is called when the agent is reset.
"""
... # pragma: no cover


class IPythonCodeResult(CodeResult):
"""(Experimental) A code result class for IPython code executor."""

output_files: List[str] = Field(
default_factory=list,
description="The list of files that the executed code blocks generated.",
)
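A sketch of how an executor might populate the relocated result type (the values are illustrative; exit_code and output are inherited from CodeResult):

from autogen.coding.base import IPythonCodeResult

result = IPythonCodeResult(
    exit_code=0,
    output="figure saved",
    output_files=["coding/figure_1.png"],  # the field added by this class
)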
16 changes: 5 additions & 11 deletions autogen/coding/embedded_ipython_code_executor.py
@@ -1,6 +1,7 @@
import base64
import json
import os
from pathlib import Path
import re
import uuid
from queue import Empty
@@ -11,19 +12,10 @@
from pydantic import BaseModel, Field, field_validator

from ..agentchat.agent import LLMAgent
from .base import CodeBlock, CodeExtractor, CodeResult
from .base import CodeBlock, CodeExtractor, IPythonCodeResult
from .markdown_code_extractor import MarkdownCodeExtractor

__all__ = ("EmbeddedIPythonCodeExecutor", "IPythonCodeResult")


class IPythonCodeResult(CodeResult):
"""(Experimental) A code result class for IPython code executor."""

output_files: List[str] = Field(
default_factory=list,
description="The list of files that the executed code blocks generated.",
)
__all__ = "EmbeddedIPythonCodeExecutor"


class EmbeddedIPythonCodeExecutor(BaseModel):
@@ -126,6 +118,8 @@ def __init__(self, **kwargs: Any):
self._kernel_client = self._kernel_manager.client()
self._kernel_client.start_channels()
self._timeout = self.timeout
self._kernel_name = self.kernel_name
self._output_dir = Path(self.output_dir)

@property
def user_capability(self) -> "EmbeddedIPythonCodeExecutor.UserCapability":
4 changes: 4 additions & 0 deletions autogen/coding/factory.py
@@ -37,5 +37,9 @@ def create(code_execution_config: Dict[str, Any]) -> CodeExecutor:
from .local_commandline_code_executor import LocalCommandlineCodeExecutor

return LocalCommandlineCodeExecutor(**code_execution_config.get("commandline-local", {}))
elif executor == "jupyter-local":
from .jupyter_code_executor import LocalJupyterCodeExecutor

return LocalJupyterCodeExecutor(**code_execution_config.get("jupyter-local", {}))
else:
raise ValueError(f"Unknown code executor {executor}")
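A sketch of selecting the new executor through the factory, assuming the factory class in this file is CodeExecutorFactory; the nested "jupyter-local" key carries constructor kwargs for LocalJupyterCodeExecutor (empty here, so defaults apply):

from autogen.coding.factory import CodeExecutorFactory

executor = CodeExecutorFactory.create(
    {
        "executor": "jupyter-local",
        "jupyter-local": {},  # hypothetical: no overrides, use executor defaults
    }
)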
5 changes: 5 additions & 0 deletions autogen/coding/jupyter/__init__.py
@@ -0,0 +1,5 @@
from .base import JupyterConnectable, JupyterConnectionInfo
from .jupyter_client import JupyterClient
from .local_jupyter_server import LocalJupyterServer

__all__ = ["JupyterConnectable", "JupyterConnectionInfo", "JupyterClient", "LocalJupyterServer"]
21 changes: 21 additions & 0 deletions autogen/coding/jupyter/base.py
@@ -0,0 +1,21 @@
from dataclasses import dataclass
from typing import Optional, Protocol, runtime_checkable


@dataclass
class JupyterConnectionInfo:
"""(Experimental)"""

host: str
use_https: bool
port: int
token: Optional[str]


@runtime_checkable
class JupyterConnectable(Protocol):
"""(Experimental)"""

@property
def connection_info(self) -> JupyterConnectionInfo:
pass
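A usage sketch for the new dataclass and protocol (the server class below is hypothetical). Because JupyterConnectable is runtime_checkable, isinstance checks verify that the connection_info attribute is present:

from autogen.coding.jupyter.base import JupyterConnectable, JupyterConnectionInfo

info = JupyterConnectionInfo(host="127.0.0.1", use_https=False, port=8888, token=None)

class StaticServer:
    # Hypothetical JupyterConnectable: always returns the same connection info.
    @property
    def connection_info(self) -> JupyterConnectionInfo:
        return info

assert isinstance(StaticServer(), JupyterConnectable)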
