Fix for bugs that appeared when using function calls with the clear history functionality #1531

Merged
27 commits merged on Mar 2, 2024
Commits (27)
af1ad11
resolved errors happening when using function calling and clear history
GregorD1A1 Feb 4, 2024
04593f6
checking if nr_of_messages_to_preserve was provided
GregorD1A1 Feb 4, 2024
5a9ac7b
code formatting
GregorD1A1 Feb 4, 2024
60b7037
test added, dict signature improved
GregorD1A1 Feb 6, 2024
4b6790f
test added, dict signature improved
GregorD1A1 Feb 6, 2024
d07cae3
test added, dict signature improved
GregorD1A1 Feb 6, 2024
73ca7b8
test added, dict signature improved
GregorD1A1 Feb 6, 2024
1b31c96
test added, dict signature improved
GregorD1A1 Feb 6, 2024
09c6a5e
test added, dict signature improved
GregorD1A1 Feb 6, 2024
fef80a5
test added, dict signature improved
GregorD1A1 Feb 6, 2024
d357226
test added, dict signature improved
GregorD1A1 Feb 6, 2024
eaa1899
Test updated
GregorD1A1 Feb 7, 2024
0711f2c
test improved
GregorD1A1 Feb 7, 2024
89644e0
test improved
GregorD1A1 Feb 7, 2024
67c13e6
Merge branch 'main' into history_cleaning_function_call
GregorD1A1 Feb 8, 2024
4241889
Merge branch 'main' into history_cleaning_function_call
GregorD1A1 Feb 9, 2024
5afb67b
Merge branch 'main' into history_cleaning_function_call
GregorD1A1 Feb 13, 2024
88c2efc
comment about preserving additional message added
GregorD1A1 Feb 13, 2024
eba57a6
commentary about clear history called in tool response improved
GregorD1A1 Feb 13, 2024
cd65426
created test for clear history called from tool response
GregorD1A1 Feb 13, 2024
13f9dc7
code formatting
GregorD1A1 Feb 13, 2024
176cfca
Merge branch 'main' into history_cleaning_function_call
sonichi Feb 14, 2024
4a0b73f
added 'USER INTERRUPTED' as internal content of tool response
GregorD1A1 Feb 14, 2024
28ff877
Merge branch 'history_cleaning_function_call' of https://github.com/G…
GregorD1A1 Feb 14, 2024
2975ff6
added separate variable 'nr_messages_to_preserve_internal'
GregorD1A1 Feb 22, 2024
025e9f8
Merge branch 'main' into history_cleaning_function_call
GregorD1A1 Feb 27, 2024
698a443
Merge branch 'main' into history_cleaning_function_call
GregorD1A1 Mar 2, 2024
autogen/agentchat/conversable_agent.py (12 changes: 11 additions, 1 deletion)
@@ -1126,8 +1126,18 @@ def clear_history(self, recipient: Optional[Agent] = None, nr_messages_to_preserve
         if recipient is None:
             if nr_messages_to_preserve:
                 for key in self._oai_messages:
+                    nr_messages_to_preserve_internal = nr_messages_to_preserve
+                    # if breaking history between function call and function response, save function call message
+                    # additionally, otherwise openai will return error
+                    first_msg_to_save = self._oai_messages[key][-nr_messages_to_preserve_internal]
+                    if "tool_responses" in first_msg_to_save:
+                        nr_messages_to_preserve_internal += 1
+                        print(
+                            f"Preserving one more message for {self.name} to not divide history between tool call and "
+                            f"tool response."
+                        )
                     # Remove messages from history except last `nr_messages_to_preserve` messages.
-                    self._oai_messages[key] = self._oai_messages[key][-nr_messages_to_preserve:]
+                    self._oai_messages[key] = self._oai_messages[key][-nr_messages_to_preserve_internal:]
             else:
                 self._oai_messages.clear()
         else:
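To make the preservation rule above concrete, here is a minimal, self-contained sketch, not the library code itself: if truncating a history to the last N messages would leave it starting with a tool response, one extra message is kept so the preceding tool call survives. The message dicts follow the OpenAI chat format; the helper name `truncate_history` and the sample history are illustrative only.

```python
def truncate_history(messages: list[dict], nr_messages_to_preserve: int) -> list[dict]:
    """Keep the last `nr_messages_to_preserve` messages, plus one extra message if the
    cut would otherwise begin with a tool response (which the OpenAI API rejects)."""
    nr_internal = nr_messages_to_preserve
    first_msg_to_save = messages[-nr_internal]
    if "tool_responses" in first_msg_to_save:
        nr_internal += 1  # also keep the tool call that produced this response
    return messages[-nr_internal:]


history = [
    {"role": "user", "content": "hello"},
    {
        "role": "assistant",
        "content": None,
        "tool_calls": [{"id": "call_1", "type": "function", "function": {"name": "test_tool", "arguments": ""}}],
    },
    {
        "role": "tool",
        "content": "example tool response",
        "tool_responses": [{"tool_call_id": "call_1", "role": "tool", "content": "example tool response"}],
    },
]

# Asking to preserve 1 message would strand the tool response, so 2 messages are kept.
assert len(truncate_history(history, 1)) == 2
```

Requesting a single preserved message therefore keeps two, which is the situation the print statement in the diff reports for the real agent history.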
autogen/agentchat/groupchat.py (26 changes: 19 additions, 7 deletions)
@@ -596,9 +596,11 @@ def run_chat(
             if (
                 groupchat.enable_clear_history
                 and isinstance(reply, dict)
+                and reply["content"]
                 and "CLEAR HISTORY" in reply["content"].upper()
             ):
-                reply["content"] = self.clear_agents_history(reply["content"], groupchat)
+                reply["content"] = self.clear_agents_history(reply, groupchat)
+
             # The speaker sends the message without requesting a reply
             speaker.send(reply, self, request_reply=False)
             message = self.last_message(speaker)
@@ -684,7 +686,7 @@ def _raise_exception_on_async_reply_functions(self) -> None:
         for agent in self._groupchat.agents:
             agent._raise_exception_on_async_reply_functions()
 
-    def clear_agents_history(self, reply: str, groupchat: GroupChat) -> str:
+    def clear_agents_history(self, reply: dict, groupchat: GroupChat) -> str:
         """Clears history of messages for all agents or selected one. Can preserve selected number of last messages.
         That function is called when user manually provide "clear history" phrase in his reply.
         When "clear history" is provided, the history of messages for all agents is cleared.
@@ -696,23 +698,27 @@ def clear_agents_history(self, reply: str, groupchat: GroupChat) -> str:
         Phrase "clear history" and optional arguments are cut out from the reply before it passed to the chat.
 
         Args:
-            reply (str): Admin reply to analyse.
+            reply (dict): reply message dict to analyze.
             groupchat (GroupChat): GroupChat object.
         """
+        reply_content = reply["content"]
         # Split the reply into words
-        words = reply.split()
+        words = reply_content.split()
         # Find the position of "clear" to determine where to start processing
         clear_word_index = next(i for i in reversed(range(len(words))) if words[i].upper() == "CLEAR")
         # Extract potential agent name and steps
         words_to_check = words[clear_word_index + 2 : clear_word_index + 4]
         nr_messages_to_preserve = None
+        nr_messages_to_preserve_provided = False
         agent_to_memory_clear = None
 
         for word in words_to_check:
             if word.isdigit():
                 nr_messages_to_preserve = int(word)
+                nr_messages_to_preserve_provided = True
             elif word[:-1].isdigit():  # for the case when number of messages is followed by dot or other sign
                 nr_messages_to_preserve = int(word[:-1])
+                nr_messages_to_preserve_provided = True
             else:
                 for agent in groupchat.agents:
                     if agent.name == word:
@@ -721,6 +727,12 @@ def clear_agents_history(self, reply: str, groupchat: GroupChat) -> str:
                     elif agent.name == word[:-1]:  # for the case when agent name is followed by dot or other sign
                         agent_to_memory_clear = agent
                         break
+        # preserve last tool call message if clear history called inside of tool response
+        if "tool_responses" in reply and not nr_messages_to_preserve:
+            nr_messages_to_preserve = 1
+            logger.warning(
+                "The last tool call message will be saved to prevent errors caused by tool response without tool call."
+            )
         # clear history
         if agent_to_memory_clear:
             if nr_messages_to_preserve:
@@ -746,7 +758,7 @@ def clear_agents_history(self, reply: str, groupchat: GroupChat) -> str:
                 agent.clear_history(nr_messages_to_preserve=nr_messages_to_preserve)
 
         # Reconstruct the reply without the "clear history" command and parameters
-        skip_words_number = 2 + int(bool(agent_to_memory_clear)) + int(bool(nr_messages_to_preserve))
-        reply = " ".join(words[:clear_word_index] + words[clear_word_index + skip_words_number :])
+        skip_words_number = 2 + int(bool(agent_to_memory_clear)) + int(nr_messages_to_preserve_provided)
+        reply_content = " ".join(words[:clear_word_index] + words[clear_word_index + skip_words_number :])
 
-        return reply
+        return reply_content
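For readers tracing the parsing logic, the sketch below is a simplified, standalone rendition of how the "clear history" command is pulled out of a reply: the two words after the phrase may carry an agent name and/or the number of trailing messages to preserve, possibly followed by punctuation. It is not the library code; the function name `parse_clear_history` and the agent names are illustrative, and trailing punctuation is stripped with `rstrip` rather than the single-character check used in the diff.

```python
def parse_clear_history(reply_content: str, agent_names: list[str]):
    """Return (agent_to_clear, nr_messages_to_preserve) parsed from a reply that
    contains the phrase "clear history", following the word-scanning idea above."""
    words = reply_content.split()
    # Position of the last "clear" decides where the command starts.
    clear_word_index = next(i for i in reversed(range(len(words))) if words[i].upper() == "CLEAR")
    # The two words after "clear history" may hold an agent name and/or a number.
    words_to_check = words[clear_word_index + 2 : clear_word_index + 4]

    agent_to_clear = None
    nr_messages_to_preserve = None
    for word in words_to_check:
        stripped = word.rstrip(".,!?")  # tolerate trailing punctuation, as the diff does
        if stripped.isdigit():
            nr_messages_to_preserve = int(stripped)
        elif stripped in agent_names:
            agent_to_clear = stripped
    return agent_to_clear, nr_messages_to_preserve


# "alice" and "bob" are hypothetical agent names used only for this illustration.
print(parse_clear_history("Clear history alice 1. How are you doing?", ["alice", "bob"]))
# ('alice', 1)
```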
test/agentchat/test_groupchat.py (97 changes: 97 additions, 0 deletions)
@@ -779,6 +779,103 @@ def test_clear_agents_history():
         {"content": "How you doing?", "name": "sam", "role": "user"},
     ]
 
+    # testing saving tool_call message when clear history going to remove it leaving only tool_response message
+    agent1.reset()
+    agent2.reset()
+    agent3.reset()
+    # we want to broadcast the message only in the preparation.
+    groupchat = autogen.GroupChat(agents=[agent1, agent2, agent3], messages=[], max_round=1, enable_clear_history=True)
+    group_chat_manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=False)
+    # We want to trigger the broadcast of group chat manager, which requires `request_reply` to be set to True.
+    agent1.send("dummy message", group_chat_manager, request_reply=True)
+    agent1.send(
+        {
+            "content": None,
+            "role": "assistant",
+            "function_call": None,
+            "tool_calls": [
+                {"id": "call_test_id", "function": {"arguments": "", "name": "test_tool"}, "type": "function"}
+            ],
+        },
+        group_chat_manager,
+        request_reply=True,
+    )
+    agent1.send(
+        {
+            "role": "tool",
+            "tool_responses": [{"tool_call_id": "call_emulated", "role": "tool", "content": "example tool response"}],
+            "content": "example tool response",
+        },
+        group_chat_manager,
+        request_reply=True,
+    )
+    # increase max_round to 3
+    groupchat.max_round = 3
+    group_chat_manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=False)
+    with mock.patch.object(builtins, "input", lambda _: "clear history alice 1. How you doing?"):
+        agent1.initiate_chat(group_chat_manager, message="hello", clear_history=False)
+
+    agent1_history = list(agent1._oai_messages.values())[0]
+    assert agent1_history == [
+        {
+            "tool_calls": [
+                {"id": "call_test_id", "function": {"arguments": "", "name": "test_tool"}, "type": "function"},
+            ],
+            "content": None,
+            "role": "assistant",
+        },
+        {
+            "content": "example tool response",
+            "tool_responses": [{"tool_call_id": "call_emulated", "role": "tool", "content": "example tool response"}],
+            "role": "tool",
+        },
+    ]
+
+    # testing clear history called from tool response
+    agent1.reset()
+    agent2.reset()
+    agent3.reset()
+    agent2 = autogen.ConversableAgent(
+        "bob",
+        max_consecutive_auto_reply=10,
+        human_input_mode="NEVER",
+        llm_config=False,
+        default_auto_reply={
+            "role": "tool",
+            "tool_responses": [{"tool_call_id": "call_emulated", "role": "tool", "content": "USER INTERRUPTED"}],
+            "content": "Clear history. How you doing?",
+        },
+    )
+    groupchat = autogen.GroupChat(agents=[agent1, agent2, agent3], messages=[], max_round=1, enable_clear_history=True)
+    group_chat_manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=False)
+    agent1.send("dummy message", group_chat_manager, request_reply=True)
+    agent1.send(
+        {
+            "content": None,
+            "role": "assistant",
+            "function_call": None,
+            "tool_calls": [
+                {"id": "call_test_id", "function": {"arguments": "", "name": "test_tool"}, "type": "function"}
+            ],
+        },
+        group_chat_manager,
+        request_reply=True,
+    )
+    groupchat.max_round = 2
+    group_chat_manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=False)
+
+    agent1.initiate_chat(group_chat_manager, message="hello")
+    agent1_history = list(agent1._oai_messages.values())[0]
+    assert agent1_history == [
+        {
+            "tool_calls": [
+                {"id": "call_test_id", "function": {"arguments": "", "name": "test_tool"}, "type": "function"},
+            ],
+            "content": None,
+            "role": "assistant",
+        },
+    ]
+
 
 def test_get_agent_by_name():
     def agent(name: str) -> autogen.ConversableAgent: