Add basic notebook for GPTAssistantAgent (microsoft#636)
* Refactor GPTAssistantAgent constructor to handle instructions and the overwrite_instructions flag (see the sketch below, after the commit message)

- Ensure that `system_message` is always consistent with `instructions`
- Ensure provided instructions are always used
- Add an option to permanently modify the assistant's instructions

* Improve default behavior

* Add a test; add method to delete assistant

* Add a new test for overwriting instructions

* Add test case for when no instructions are given for existing assistant

* Add pytest markers to test_gpt_assistant.py

* Add test in workflow

* Update

* Fix test_client_stream

* Comment out test_hierarchy_

* Add basic GPTAssistantAgent notebook

- Also improve logging in GPTAssistantAgent

* Update notebook/agentchat_oai_assistant_twoagents_basic.ipynb

Co-authored-by: Qingyun Wu <qingyun.wu@psu.edu>

---------

Co-authored-by: Chi Wang <wang.chi@microsoft.com>
Co-authored-by: kevin666aa <yrwu000627@gmail.com>
Co-authored-by: Qingyun Wu <qingyun.wu@psu.edu>
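
The constructor flow described in the first item above is roughly the following. This is a minimal sketch, not the actual diff: the helper name `resolve_assistant`, the default model string, and the fallback instruction text are illustrative assumptions; the `beta.assistants` calls assume the v1 `openai` Python client that the agent wraps.

```python
# Minimal sketch of the instructions / overwrite_instructions handling described
# above. Illustrative only: resolve_assistant, the default model string, and the
# fallback instruction text are assumptions, not code from this commit.
import logging
from typing import Optional

from openai import OpenAI  # v1 OpenAI Python client

logger = logging.getLogger(__name__)


def resolve_assistant(
    client: OpenAI,
    llm_config: dict,
    instructions: Optional[str],
    overwrite_instructions: bool = False,
):
    """Create a new assistant, or retrieve an existing one and keep instructions consistent."""
    assistant_id = llm_config.get("assistant_id", None)
    if assistant_id is None:
        # No assistant_id given: create a brand-new assistant.
        logger.warning("assistant_id was None, creating a new assistant")
        if instructions is None:
            logger.warning("No instructions provided; using a default system message")
            instructions = "You are a helpful AI assistant."  # placeholder default
        return client.beta.assistants.create(
            name="assistant",
            instructions=instructions,
            model=llm_config.get("model", "gpt-4-1106-preview"),  # assumed default
        )
    # assistant_id given: reuse the existing assistant.
    assistant = client.beta.assistants.retrieve(assistant_id)
    if instructions is not None and overwrite_instructions:
        # Permanently overwrite the instructions stored with the assistant.
        assistant = client.beta.assistants.update(assistant_id, instructions=instructions)
    return assistant
```

When instructions are supplied for an existing assistant without the flag, the sketch leaves the stored assistant untouched; only the session-level `system_message` would follow them, which appears to be the intent of the "system_message is always consistent with instructions" bullet.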
4 people committed Nov 12, 2023
1 parent daf0bf7 commit 16dce1f
Showing 2 changed files with 216 additions and 0 deletions.
2 changes: 2 additions & 0 deletions autogen/agentchat/contrib/gpt_assistant_agent.py
@@ -52,6 +52,7 @@ def __init__(
         self._openai_client = oai_wrapper._clients[0]
         openai_assistant_id = llm_config.get("assistant_id", None)
         if openai_assistant_id is None:
+            logger.warning("assistant_id was None, creating a new assistant")
             # create a new assistant
             if instructions is None:
                 logger.warning(
@@ -346,4 +347,5 @@ def get_assistant_instructions(self):

     def delete_assistant(self):
         """Delete the assistant from OAI assistant API"""
+        logger.warning("Permanently deleting assistant...")
         self._openai_client.beta.assistants.delete(self.assistant_id)
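
The second hunk only adds a warning before the deletion call, since `delete_assistant` removes the assistant on the OpenAI side rather than just discarding the local agent. A minimal usage sketch, mirroring the notebook below (the try/finally wrapping is an illustration, not part of the commit):

```python
# Create a throwaway assistant and make sure it is cleaned up afterwards.
# delete_assistant() is permanent: it removes the assistant from the OpenAI account.
from autogen import AssistantAgent, config_list_from_json
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

config_list = config_list_from_json("OAI_CONFIG_LIST")

gpt_assistant = GPTAssistantAgent(
    name="assistant",
    instructions=AssistantAgent.DEFAULT_SYSTEM_MESSAGE,
    llm_config={"config_list": config_list},
)
try:
    pass  # ... chat with the assistant via a UserProxyAgent here ...
finally:
    gpt_assistant.delete_assistant()
```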
214 changes: 214 additions & 0 deletions notebook/agentchat_oai_assistant_twoagents_basic.ipynb
@@ -0,0 +1,214 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## OpenAI Assistants in AutoGen\n",
"\n",
"This notebook shows a very basic example of the [`GPTAssistantAgent`](https://github.com/microsoft/autogen/blob/main/autogen/agentchat/contrib/gpt_assistant_agent.py#L16C43-L16C43), which is an experimental AutoGen agent class that leverages the [OpenAI Assistant API](https://platform.openai.com/docs/assistants/overview) for conversational capabilities, working with\n",
"`UserProxyAgent` in AutoGen."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"assistant_id was None, creating a new assistant\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33muser_proxy\u001b[0m (to assistant):\n",
"\n",
"Print hello world\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33massistant\u001b[0m (to user_proxy):\n",
"\n",
"```python\n",
"print(\"Hello, World!\")\n",
"```\n",
"\n",
"Please run this Python code to print \"Hello, World!\" to the console.\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[31m\n",
">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33muser_proxy\u001b[0m (to assistant):\n",
"\n",
"exitcode: 0 (execution succeeded)\n",
"Code output: \n",
"Hello, World!\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33massistant\u001b[0m (to user_proxy):\n",
"\n",
"The code executed successfully and printed \"Hello, World!\" as expected.\n",
"\n",
"TERMINATE\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n"
]
}
],
"source": [
"import logging\n",
"import os\n",
" \n",
"from autogen import config_list_from_json\n",
"from autogen import AssistantAgent\n",
"from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent\n",
"from autogen import UserProxyAgent\n",
"\n",
"logger = logging.getLogger(__name__)\n",
"logger.setLevel(logging.WARNING)\n",
"\n",
"assistant_id = os.environ.get(\"ASSISTANT_ID\", None)\n",
"\n",
"config_list = config_list_from_json(\"OAI_CONFIG_LIST\")\n",
"llm_config = {\n",
" \"config_list\": config_list,\n",
" \"assistant_id\": assistant_id\n",
"}\n",
"\n",
"gpt_assistant = GPTAssistantAgent(name=\"assistant\",\n",
" instructions=AssistantAgent.DEFAULT_SYSTEM_MESSAGE,\n",
" llm_config=llm_config)\n",
"\n",
"user_proxy = UserProxyAgent(name=\"user_proxy\",\n",
" code_execution_config={\n",
" \"work_dir\": \"coding\"\n",
" },\n",
" is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n",
" human_input_mode=\"NEVER\",\n",
" max_consecutive_auto_reply=1)\n",
"user_proxy.initiate_chat(gpt_assistant, message=\"Print hello world\")"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33muser_proxy\u001b[0m (to assistant):\n",
"\n",
"Write py code to eval 2 + 2\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33massistant\u001b[0m (to user_proxy):\n",
"\n",
"```python\n",
"# Let's write a simple Python code to evaluate 2 + 2 and print the result.\n",
"\n",
"result = 2 + 2\n",
"print(result)\n",
"```\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[31m\n",
">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[33muser_proxy\u001b[0m (to assistant):\n",
"\n",
"exitcode: 0 (execution succeeded)\n",
"Code output: \n",
"4\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n",
"\u001b[33massistant\u001b[0m (to user_proxy):\n",
"\n",
"The Python code was executed successfully and the result of evaluating 2 + 2 is 4.\n",
"\n",
"TERMINATE\n",
"\n",
"\n",
"--------------------------------------------------------------------------------\n"
]
}
],
"source": [
"user_proxy.initiate_chat(gpt_assistant, message=\"Write py code to eval 2 + 2\", clear_history=True)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Permanently deleting assistant...\n"
]
}
],
"source": [
"gpt_assistant.delete_assistant()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
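
Because the notebook reads `ASSISTANT_ID` from the environment, a later session can reuse the assistant created here instead of making a new one. A minimal sketch of that reuse path (the environment-variable name and the `llm_config` key come from the notebook; per the commit message, omitting instructions for an existing assistant falls back to the assistant's stored instructions):

```python
# Reuse an existing assistant by passing its id through llm_config, as the
# notebook above does via the ASSISTANT_ID environment variable.
import os

from autogen import config_list_from_json
from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent

config_list = config_list_from_json("OAI_CONFIG_LIST")
llm_config = {
    "config_list": config_list,
    # When set, the agent retrieves this assistant instead of creating a new one.
    "assistant_id": os.environ.get("ASSISTANT_ID", None),
}

# Per the commit message, the stored instructions are used when none are passed.
gpt_assistant = GPTAssistantAgent(name="assistant", llm_config=llm_config)
```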
