Skip to content

Commit

Permalink
Add test for async group chat (microsoft#902)
Browse files Browse the repository at this point in the history
* Add test for async group chat

* run pre-commit

---------

Co-authored-by: Qingyun Wu <qingyun.wu@psu.edu>
  • Loading branch information
2 people authored and rlam3 committed Dec 19, 2023
1 parent 8eba4e8 commit 50b77fe
Showing 1 changed file with 46 additions and 0 deletions.
46 changes: 46 additions & 0 deletions test/agentchat/test_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,52 @@ def get_market_news(ind, ind_upper):
return feeds_summary


@pytest.mark.asyncio
async def test_async_groupchat():
    """End-to-end check that a two-agent GroupChat runs through the async API.

    Skips silently when the ``openai`` package is unavailable, mirroring the
    other tests in this module. Asserts only that the user proxy recorded at
    least one chat message after ``a_initiate_chat`` returns.
    """
    try:
        import openai
    except ImportError:
        return

    config_list = autogen.config_list_from_json(OAI_CONFIG_LIST, KEY_LOC)

    llm_config = {
        "timeout": 600,
        "cache_seed": 41,
        "config_list": config_list,
        "temperature": 0,
    }

    # create an AssistantAgent instance named "assistant"
    assistant = autogen.AssistantAgent(
        name="assistant",
        # reuse the shared llm_config instead of duplicating the same dict inline
        llm_config=llm_config,
        system_message="You are a helpful assistant. Reply 'TERMINATE' to end the conversation.",
    )
    # create a UserProxyAgent instance named "user"
    user_proxy = autogen.UserProxyAgent(
        name="user",
        human_input_mode="NEVER",
        max_consecutive_auto_reply=5,
        code_execution_config=False,
        default_auto_reply=None,
    )

    groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=12)
    manager = autogen.GroupChatManager(
        groupchat=groupchat,
        llm_config=llm_config,
        # Guard against messages whose "content" is None (the key can be present
        # with a null value, e.g. for tool/function-call messages): with
        # x.get("content", "") the default is NOT used and `"TERMINATE" in None`
        # would raise TypeError.
        is_termination_msg=lambda x: "TERMINATE" in (x.get("content") or ""),
    )
    await user_proxy.a_initiate_chat(manager, message="""Have a short conversation with the assistant.""")
    assert len(user_proxy.chat_messages) > 0


@pytest.mark.asyncio
async def test_stream():
try:
Expand Down

0 comments on commit 50b77fe

Please sign in to comment.