run pre-commit
qingyun-wu committed Dec 7, 2023
1 parent 165e6a7 commit a9cf2f9
Showing 1 changed file with 18 additions and 17 deletions.
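The commit message indicates this is a formatting-only pass produced by the project's pre-commit hooks. As a rough aside (not part of the commit itself), such a pass can typically be reproduced locally with the pre-commit CLI, e.g. pre-commit run --all-files. A minimal Python sketch of that invocation, assuming the pre-commit package is installed and the repository defines a .pre-commit-config.yaml:

# Hedged sketch: re-run the repository's formatting hooks over the whole tree.
# Assumes pre-commit is installed (pip install pre-commit) and a
# .pre-commit-config.yaml exists at the repository root.
import subprocess

result = subprocess.run(["pre-commit", "run", "--all-files"], check=False)
if result.returncode != 0:
    # A non-zero exit usually means a hook failed or reformatted files in place;
    # review the working tree and re-run until the hooks pass.
    print("pre-commit modified files or reported failures")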
35 changes: 18 additions & 17 deletions test/agentchat/test_async.py
@@ -4,7 +4,6 @@
 from test_assistant_agent import KEY_LOC, OAI_CONFIG_LIST

-

 def get_market_news(ind, ind_upper):
     data = {
         "feed": [
@@ -45,23 +44,23 @@ def get_market_news(ind, ind_upper):
     )
     return feeds_summary


 @pytest.mark.asyncio
 async def test_async_groupchat():

     try:
         import openai
     except ImportError:
         return

     config_list = autogen.config_list_from_json(OAI_CONFIG_LIST, KEY_LOC)
-    llm_config={
-        "timeout": 600,
-        "cache_seed": 41,
-        "config_list": config_list,
-        "temperature": 0,
-    }
-
+    llm_config = {
+        "timeout": 600,
+        "cache_seed": 41,
+        "config_list": config_list,
+        "temperature": 0,
+    }
+
     # create an AssistantAgent instance named "assistant"
     assistant = autogen.AssistantAgent(
         name="assistant",
@@ -83,12 +82,14 @@ async def test_async_groupchat():
     )

     groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=12)
-    manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config,
-        is_termination_msg=lambda x: "TERMINATE" in x.get("content", ""),
-    )
-    await user_proxy.a_initiate_chat(manager,
-        message="""Have a short conversation with the assistant.""")
-    assert len(user_proxy.chat_messages)>0
+    manager = autogen.GroupChatManager(
+        groupchat=groupchat,
+        llm_config=llm_config,
+        is_termination_msg=lambda x: "TERMINATE" in x.get("content", ""),
+    )
+    await user_proxy.a_initiate_chat(manager, message="""Have a short conversation with the assistant.""")
+    assert len(user_proxy.chat_messages) > 0
+

 @pytest.mark.asyncio
 async def test_stream():
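
For context (this note is not part of the diff), test_async_groupchat exercises AutoGen's awaitable group-chat flow: construct the agents, collect them in a GroupChat, wrap that in a GroupChatManager, and drive the conversation with a_initiate_chat. The sketch below only illustrates that flow; the agent settings (human_input_mode, code_execution_config, and the commented driver at the end) are assumptions for the example, not the collapsed lines of the actual test.

# Illustrative sketch of the async group-chat flow exercised above.
# Agent parameters are placeholders, not the repository's collapsed test code.
import asyncio

import autogen


async def demo_async_groupchat(config_list):
    llm_config = {"config_list": config_list, "cache_seed": 41, "temperature": 0}

    assistant = autogen.AssistantAgent(name="assistant", llm_config=llm_config)
    user_proxy = autogen.UserProxyAgent(
        name="user_proxy",
        human_input_mode="NEVER",  # run unattended, e.g. in CI
        code_execution_config=False,  # no local code execution in this sketch
    )

    groupchat = autogen.GroupChat(agents=[user_proxy, assistant], messages=[], max_round=12)
    manager = autogen.GroupChatManager(
        groupchat=groupchat,
        llm_config=llm_config,
        is_termination_msg=lambda x: "TERMINATE" in x.get("content", ""),
    )

    # a_initiate_chat is the awaitable counterpart of initiate_chat.
    await user_proxy.a_initiate_chat(manager, message="Have a short conversation with the assistant.")
    return user_proxy.chat_messages


# Example driver (requires a valid OAI_CONFIG_LIST for config_list_from_json):
# config_list = autogen.config_list_from_json("OAI_CONFIG_LIST")
# asyncio.run(demo_async_groupchat(config_list))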
