Skip to content

Commit

Permalink
Merge branch 'main' into redis_cache_type
Browse files Browse the repository at this point in the history
  • Loading branch information
ekzhu committed Mar 6, 2024
2 parents d074ef5 + 09a4918 commit c2c944b
Show file tree
Hide file tree
Showing 13 changed files with 244 additions and 24 deletions.
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
[![Build](https://github.com/microsoft/autogen/actions/workflows/python-package.yml/badge.svg)](https://github.com/microsoft/autogen/actions/workflows/python-package.yml)
![Python Version](https://img.shields.io/badge/3.8%20%7C%203.9%20%7C%203.10%20%7C%203.11%20%7C%203.12-blue)
[![Downloads](https://static.pepy.tech/badge/pyautogen/week)](https://pepy.tech/project/pyautogen)
[![](https://img.shields.io/discord/1153072414184452236?logo=discord&style=flat)](https://discord.gg/pAbnFJrkgZ)
[![Discord](https://img.shields.io/discord/1153072414184452236?logo=discord&style=flat)](https://discord.gg/pAbnFJrkgZ)
[![Twitter](https://img.shields.io/twitter/url/https/twitter.com/cloudposse.svg?style=social&label=Follow%20%40pyautogen)](https://twitter.com/pyautogen)


Expand All @@ -12,6 +12,7 @@
<img src="https://github.com/microsoft/autogen/blob/main/website/static/img/flaml.svg" width=200>
<br>
</p> -->
:fire: Mar 1: the first AutoGen multi-agent experiment on the challenging [GAIA](https://huggingface.co/spaces/gaia-benchmark/leaderboard) benchmark achieved No. 1 accuracy across all three levels.

:fire: Jan 30: AutoGen is highlighted by Peter Lee in Microsoft Research Forum [Keynote](https://t.co/nUBSjPDjqD).

Expand Down
19 changes: 19 additions & 0 deletions autogen/agentchat/conversable_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -1271,6 +1271,25 @@ def _generate_code_execution_reply_using_executor(
code_blocks = self._code_executor.code_extractor.extract_code_blocks(message["content"])
if len(code_blocks) == 0:
continue

num_code_blocks = len(code_blocks)
if num_code_blocks == 1:
print(
colored(
f"\n>>>>>>>> EXECUTING CODE BLOCK (inferred language is {code_blocks[0].language})...",
"red",
),
flush=True,
)
else:
print(
colored(
f"\n>>>>>>>> EXECUTING {num_code_blocks} CODE BLOCKS (inferred languages are [{', '.join([x.language for x in code_blocks])}])...",
"red",
),
flush=True,
)

# found code blocks, execute code.
code_result = self._code_executor.execute_code_blocks(code_blocks)
exitcode2str = "execution succeeded" if code_result.exit_code == 0 else "execution failed"
Expand Down
27 changes: 15 additions & 12 deletions autogen/coding/jupyter/local_jupyter_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,6 @@
from .jupyter_client import JupyterClient


def _get_free_port() -> int:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(("", 0))
return cast(int, s.getsockname()[1])


class LocalJupyterServer(JupyterConnectable):
class GenerateToken:
pass
Expand Down Expand Up @@ -69,9 +63,6 @@ def __init__(
)

self.ip = ip
if port is None:
port = _get_free_port()
self.port = port

if isinstance(token, LocalJupyterServer.GenerateToken):
token = secrets.token_hex(32)
Expand All @@ -98,8 +89,6 @@ def __init__(
"kernelgateway",
"--KernelGatewayApp.ip",
ip,
"--KernelGatewayApp.port",
str(port),
"--KernelGatewayApp.auth_token",
token,
"--JupyterApp.answer_yes",
Expand All @@ -109,6 +98,9 @@ def __init__(
"--JupyterWebsocketPersonality.list_kernels",
"true",
]
if port is not None:
args.extend(["--KernelGatewayApp.port", str(port)])
args.extend(["--KernelGatewayApp.port_retries", "0"])
self._subprocess = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)

# Satisfy mypy, we know this is not None because we passed PIPE
Expand All @@ -119,11 +111,22 @@ def __init__(
result = self._subprocess.poll()
if result is not None:
stderr += self._subprocess.stderr.read()
raise ValueError(f"Jupyter gateway server failed to start with exit code: {result}. stderr:\n{stderr}")
line = self._subprocess.stderr.readline()
stderr += line

if "ERROR:" in line:
error_info = line.split("ERROR:")[1]
raise ValueError(f"Jupyter gateway server failed to start. {error_info}")

if "is available at" in line:
# We need to extract what port it settled on
# Example output:
# Jupyter Kernel Gateway 3.0.0 is available at http://127.0.0.1:8890
if port is None:
port = int(line.split(":")[-1])
self.port = port

break

# Poll the subprocess to check if it is still running
Expand Down
10 changes: 0 additions & 10 deletions autogen/coding/local_commandline_code_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,11 @@
from typing import Any, ClassVar, List, Optional
from pydantic import BaseModel, Field, field_validator

from autogen.agentchat.conversable_agent import colored
from ..agentchat.agent import LLMAgent
from ..code_utils import execute_code
from .base import CodeBlock, CodeExtractor, CodeResult
from .markdown_code_extractor import MarkdownCodeExtractor


__all__ = (
"LocalCommandlineCodeExecutor",
"CommandlineCodeResult",
Expand Down Expand Up @@ -139,14 +137,6 @@ def execute_code_blocks(self, code_blocks: List[CodeBlock]) -> CommandlineCodeRe
lang, code = code_block.language, code_block.code

LocalCommandlineCodeExecutor.sanitize_command(lang, code)

print(
colored(
f"\n>>>>>>>> EXECUTING CODE BLOCK {i} (inferred language is {lang})...",
"red",
),
flush=True,
)
filename_uuid = uuid.uuid4().hex
filename = None
if lang in ["bash", "shell", "sh", "pwsh", "powershell", "ps1"]:
Expand Down
3 changes: 3 additions & 0 deletions website/blog/2024-03-03-AutoGen-Update/img/.gitattributes
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
gaia.png filter=lfs diff=lfs merge=lfs -text
dalle_gpt4v.png filter=lfs diff=lfs merge=lfs -text
teach.png filter=lfs diff=lfs merge=lfs -text
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit c2c944b

Please sign in to comment.