Commit
- also improve logging in gpt assistant
Showing 2 changed files with 216 additions and 0 deletions.
@@ -0,0 +1,214 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## OpenAI Assistants in AutoGen\n",
    "\n",
    "This notebook shows a very basic example of the `GPTAssistantAgent` working with\n",
    "`UserProxyAgent` in AutoGen."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "assistant_id was None, creating a new assistant\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "Print hello world\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "```python\n",
      "print(\"Hello, World!\")\n",
      "```\n",
      "\n",
      "Please run this Python code to print \"Hello, World!\" to the console.\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[31m\n",
      ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "exitcode: 0 (execution succeeded)\n",
      "Code output: \n",
      "Hello, World!\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "The code executed successfully and printed \"Hello, World!\" as expected.\n",
      "\n",
      "TERMINATE\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "import logging\n",
    "import os\n",
    " \n",
    "from autogen import config_list_from_json\n",
    "from autogen import AssistantAgent\n",
    "from autogen.agentchat.contrib.gpt_assistant_agent import GPTAssistantAgent\n",
    "from autogen import UserProxyAgent\n",
    "\n",
    "logger = logging.getLogger(__name__)\n",
    "logger.setLevel(logging.WARNING)\n",
    "\n",
    "assistant_id = os.environ.get(\"ASSISTANT_ID\", None)\n",
    "\n",
    "config_list = config_list_from_json(\"OAI_CONFIG_LIST\")\n",
    "llm_config = {\n",
    " \"config_list\": config_list,\n",
    " \"assistant_id\": assistant_id\n",
    "}\n",
    "\n",
    "gpt_assistant = GPTAssistantAgent(name=\"assistant\",\n",
    " instructions=AssistantAgent.DEFAULT_SYSTEM_MESSAGE,\n",
    " llm_config=llm_config)\n",
    "\n",
    "user_proxy = UserProxyAgent(name=\"user_proxy\",\n",
    " code_execution_config={\n",
    " \"work_dir\": \"coding\"\n",
    " },\n",
    " is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n",
    " human_input_mode=\"NEVER\",\n",
    " max_consecutive_auto_reply=1)\n",
    "user_proxy.initiate_chat(gpt_assistant, message=\"Print hello world\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "Write py code to eval 2 + 2\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "```python\n",
      "# Let's write a simple Python code to evaluate 2 + 2 and print the result.\n",
      "\n",
      "result = 2 + 2\n",
      "print(result)\n",
      "```\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[31m\n",
      ">>>>>>>> EXECUTING CODE BLOCK 0 (inferred language is python)...\u001b[0m\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "execute_code was called without specifying a value for use_docker. Since the python docker package is not available, code will be run natively. Note: this fallback behavior is subject to change\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33muser_proxy\u001b[0m (to assistant):\n",
      "\n",
      "exitcode: 0 (execution succeeded)\n",
      "Code output: \n",
      "4\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n",
      "\u001b[33massistant\u001b[0m (to user_proxy):\n",
      "\n",
      "The Python code was executed successfully and the result of evaluating 2 + 2 is 4.\n",
      "\n",
      "TERMINATE\n",
      "\n",
      "\n",
      "--------------------------------------------------------------------------------\n"
     ]
    }
   ],
   "source": [
    "user_proxy.initiate_chat(gpt_assistant, message=\"Write py code to eval 2 + 2\", clear_history=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Permanently deleting assistant...\n"
     ]
    }
   ],
   "source": [
    "gpt_assistant.delete_assistant()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
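The commit message also mentions improved logging in the GPT assistant agent; that second changed file is not shown in this diff. As a minimal, hypothetical sketch (not the patch itself), the extra log detail can be surfaced while running the notebook with Python's standard logging setup. The module-path logger name below follows the usual `logging.getLogger(__name__)` convention and is an assumption, not a documented AutoGen API:

```python
import logging

# Sketch only: attach a console handler so AutoGen's log messages (for example
# "assistant_id was None, creating a new assistant") are visible on stderr.
logging.basicConfig(level=logging.WARNING)

# Assumed logger name: gpt_assistant_agent.py presumably uses the conventional
# module-level logger, so its records can be raised to INFO selectively without
# making every other library chatty.
logging.getLogger("autogen.agentchat.contrib.gpt_assistant_agent").setLevel(logging.INFO)
```

Note that the notebook also expects an `OAI_CONFIG_LIST` file (or environment variable) readable by `config_list_from_json`, and it reuses an existing assistant when the `ASSISTANT_ID` environment variable is set; otherwise a new assistant is created, as the first cell's stderr output shows.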