Skip to content

Commit

Permalink
Merge branch 'main' of github.com:microsoft/autogen into feat/headless_browser
Browse files Browse the repository at this point in the history
  • Loading branch information
INF800 committed Mar 1, 2024
2 parents 0f65abf + 0a49f2a commit 01fc631
Show file tree
Hide file tree
Showing 23 changed files with 626 additions and 112 deletions.
6 changes: 6 additions & 0 deletions .github/ISSUE_TEMPLATE/bug_report.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,12 @@ body:
3. ...
4. See error
placeholder: How can we replicate the issue?
- type: textarea
id: modelused
attributes:
label: Model Used
description: A description of the model that was used when the error was encountered
placeholder: gpt-4, mistral-7B etc
- type: textarea
id: expected_behavior
attributes:
Expand Down
3 changes: 1 addition & 2 deletions .github/workflows/build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,8 @@ jobs:
# code executors auto skip without deps, so only run for python 3.11
if: matrix.python-version == '3.11'
run: |
pip install jupyter-client ipykernel
pip install -e ".[jupyter-executor]"
python -m ipykernel install --user --name python3
pip install -e ".[local-jupyter-exec]"
- name: Set AUTOGEN_USE_DOCKER based on OS
shell: bash
run: |
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/deploy-website.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ jobs:
quarto render .
- name: Process notebooks
run: |
python process_notebooks.py
python process_notebooks.py render
- name: Test Build
run: |
if [ -e yarn.lock ]; then
Expand Down Expand Up @@ -98,7 +98,7 @@ jobs:
quarto render .
- name: Process notebooks
run: |
python process_notebooks.py
python process_notebooks.py render
- name: Build website
run: |
if [ -e yarn.lock ]; then
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/openai.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ on:
- "notebook/agentchat_groupchat_finite_state_machine.ipynb"
- ".github/workflows/openai.yml"
permissions: {}
# actions: read
# actions: read
# checks: read
# contents: read
# deployments: read
Expand Down
2 changes: 1 addition & 1 deletion autogen/agentchat/groupchat.py
Original file line number Diff line number Diff line change
Expand Up @@ -626,7 +626,7 @@ async def a_run_chat(
# Broadcast the intro
intro = groupchat.introductions_msg()
for agent in groupchat.agents:
self.a_send(intro, agent, request_reply=False, silent=True)
await self.a_send(intro, agent, request_reply=False, silent=True)
# NOTE: We do not also append to groupchat.messages,
# since groupchat handles its own introductions

Expand Down
9 changes: 8 additions & 1 deletion autogen/coding/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,11 @@
from .factory import CodeExecutorFactory
from .markdown_code_extractor import MarkdownCodeExtractor

__all__ = ("CodeBlock", "CodeResult", "CodeExtractor", "CodeExecutor", "CodeExecutorFactory", "MarkdownCodeExtractor")
__all__ = (
"CodeBlock",
"CodeResult",
"CodeExtractor",
"CodeExecutor",
"CodeExecutorFactory",
"MarkdownCodeExtractor",
)
6 changes: 1 addition & 5 deletions autogen/coding/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,12 @@ def create(code_execution_config: Dict[str, Any]) -> CodeExecutor:
# If the executor is already an instance of CodeExecutor, return it.
return executor
if executor == "ipython-embedded":
from .embedded_ipython_code_executor import EmbeddedIPythonCodeExecutor
from .jupyter.embedded_ipython_code_executor import EmbeddedIPythonCodeExecutor

return EmbeddedIPythonCodeExecutor(**code_execution_config.get("ipython-embedded", {}))
elif executor == "commandline-local":
from .local_commandline_code_executor import LocalCommandlineCodeExecutor

return LocalCommandlineCodeExecutor(**code_execution_config.get("commandline-local", {}))
elif executor == "jupyter-local":
from .jupyter_code_executor import LocalJupyterCodeExecutor

return LocalJupyterCodeExecutor(**code_execution_config.get("jupyter-local", {}))
else:
raise ValueError(f"Unknown code executor {executor}")
13 changes: 12 additions & 1 deletion autogen/coding/jupyter/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,16 @@
from .base import JupyterConnectable, JupyterConnectionInfo
from .jupyter_client import JupyterClient
from .local_jupyter_server import LocalJupyterServer
from .docker_jupyter_server import DockerJupyterServer
from .embedded_ipython_code_executor import EmbeddedIPythonCodeExecutor
from .jupyter_code_executor import JupyterCodeExecutor

__all__ = ["JupyterConnectable", "JupyterConnectionInfo", "JupyterClient", "LocalJupyterServer"]
__all__ = [
"JupyterConnectable",
"JupyterConnectionInfo",
"JupyterClient",
"LocalJupyterServer",
"DockerJupyterServer",
"EmbeddedIPythonCodeExecutor",
"JupyterCodeExecutor",
]
167 changes: 167 additions & 0 deletions autogen/coding/jupyter/docker_jupyter_server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
from __future__ import annotations

from pathlib import Path
import sys
from time import sleep
from types import TracebackType
import uuid
from typing import Dict, Optional, Union
import docker
import secrets
import io
import atexit
import logging

if sys.version_info >= (3, 11):
from typing import Self
else:
from typing_extensions import Self


from .jupyter_client import JupyterClient
from .base import JupyterConnectable, JupyterConnectionInfo


def _wait_for_ready(container: docker.Container, timeout: int = 60, stop_time: int = 0.1) -> None:
elapsed_time = 0
while container.status != "running" and elapsed_time < timeout:
sleep(stop_time)
elapsed_time += stop_time
container.reload()
continue
if container.status != "running":
raise ValueError("Container failed to start")


class DockerJupyterServer(JupyterConnectable):
    """Runs a Jupyter kernel gateway inside a Docker container and exposes its connection info.

    Can be used as a context manager; on exit the container is stopped.
    """

    # Dockerfile used when no custom image is supplied; extends the Jupyter
    # docker-stacks foundation image with jupyter_kernel_gateway.
    DEFAULT_DOCKERFILE = """FROM quay.io/jupyter/docker-stacks-foundation
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
USER ${NB_UID}
RUN mamba install --yes jupyter_kernel_gateway ipykernel && \
    mamba clean --all -f -y && \
    fix-permissions "${CONDA_DIR}" && \
    fix-permissions "/home/${NB_USER}"
ENV TOKEN="UNSET"
CMD python -m jupyter kernelgateway --KernelGatewayApp.ip=0.0.0.0 \
    --KernelGatewayApp.port=8888 \
    --KernelGatewayApp.auth_token="${TOKEN}" \
    --JupyterApp.answer_yes=true \
    --JupyterWebsocketPersonality.list_kernels=true
EXPOSE 8888
WORKDIR "${HOME}"
"""

    class GenerateToken:
        """Sentinel type: pass an instance as ``token`` to request a random auth token."""

        pass

    def __init__(
        self,
        *,
        custom_image_name: Optional[str] = None,
        container_name: Optional[str] = None,
        auto_remove: bool = True,
        stop_container: bool = True,
        docker_env: Optional[Dict[str, str]] = None,
        token: Union[str, GenerateToken] = GenerateToken(),
    ):
        """Start a Jupyter kernel gateway server in a Docker container.

        Args:
            custom_image_name (Optional[str], optional): Custom image to use. If this is None,
                then the bundled image will be built and used. The default image is based on
                quay.io/jupyter/docker-stacks-foundation and extended to include jupyter_kernel_gateway
            container_name (Optional[str], optional): Name of the container to start.
                A name will be generated if None.
            auto_remove (bool, optional): If true the Docker container will be deleted
                when it is stopped.
            stop_container (bool, optional): If true the container will be stopped,
                either by program exit or using the context manager
            docker_env (Optional[Dict[str, str]], optional): Extra environment variables to pass
                to the running Docker container. None (the default) means no extra variables.
            token (Union[str, GenerateToken], optional): Token to use for authentication.
                If GenerateToken is used, a random token will be generated. Empty string
                will be unauthenticated.
        """
        if container_name is None:
            container_name = f"autogen-jupyterkernelgateway-{uuid.uuid4()}"

        client = docker.from_env()
        if custom_image_name is None:
            image_name = "autogen-jupyterkernelgateway"
            # Make sure the bundled image exists, building it on first use.
            try:
                client.images.get(image_name)
            except docker.errors.ImageNotFound:
                # Build the image from the in-memory Dockerfile above.
                here = Path(__file__).parent
                dockerfile = io.BytesIO(self.DEFAULT_DOCKERFILE.encode("utf-8"))
                logging.info(f"Image {image_name} not found. Building it now.")
                # NOTE(review): both path and fileobj are passed; docker-py uses
                # fileobj as the Dockerfile — confirm path is actually needed here.
                client.images.build(path=here, fileobj=dockerfile, tag=image_name)
                logging.info(f"Image {image_name} built successfully.")
        else:
            image_name = custom_image_name
            # A custom image must already exist; we never build it ourselves.
            try:
                client.images.get(image_name)
            except docker.errors.ImageNotFound:
                raise ValueError(f"Custom image {image_name} does not exist")

        if isinstance(token, DockerJupyterServer.GenerateToken):
            self._token = secrets.token_hex(32)
        else:
            self._token = token

        # Run the container with the auth token plus any caller-supplied env vars.
        env = {"TOKEN": self._token}
        env.update(docker_env or {})
        container = client.containers.run(
            image_name,
            detach=True,
            auto_remove=auto_remove,
            environment=env,
            publish_all_ports=True,
            name=container_name,
        )
        _wait_for_ready(container)
        container_ports = container.ports
        # The kernel gateway listens on 8888 in-container; resolve the published host port.
        self._port = int(container_ports["8888/tcp"][0]["HostPort"])
        self._container_id = container.id

        def cleanup():
            # Best-effort stop: the container may already be gone (e.g. auto_remove).
            try:
                inner_container = client.containers.get(container.id)
                inner_container.stop()
            except docker.errors.NotFound:
                pass

        if stop_container:
            # Ensure the container is stopped when the interpreter exits.
            atexit.register(cleanup)

        self._cleanup_func = cleanup
        self._stop_container = stop_container

    @property
    def connection_info(self) -> JupyterConnectionInfo:
        """Connection details (host, port, token) for clients of this server."""
        return JupyterConnectionInfo(host="127.0.0.1", use_https=False, port=self._port, token=self._token)

    def stop(self):
        """Stop the underlying Docker container (best-effort; safe if already gone)."""
        self._cleanup_func()

    def get_client(self) -> JupyterClient:
        """Return a JupyterClient bound to this server's connection info."""
        return JupyterClient(self.connection_info)

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self, exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
    ) -> None:
        # Context-manager exit stops the container regardless of exception state.
        self.stop()
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,9 @@
from jupyter_client.kernelspec import KernelSpecManager
from pydantic import BaseModel, Field, field_validator

from ..agentchat.agent import LLMAgent
from .base import CodeBlock, CodeExtractor, IPythonCodeResult
from .markdown_code_extractor import MarkdownCodeExtractor
from ...agentchat.agent import LLMAgent
from ..base import CodeBlock, CodeExtractor, IPythonCodeResult
from ..markdown_code_extractor import MarkdownCodeExtractor

__all__ = "EmbeddedIPythonCodeExecutor"

Expand Down
21 changes: 17 additions & 4 deletions autogen/coding/jupyter/jupyter_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import uuid
import datetime
import requests
from requests.adapters import HTTPAdapter, Retry

import websocket
from websocket import WebSocket
Expand All @@ -26,6 +27,9 @@ class JupyterClient:

def __init__(self, connection_info: JupyterConnectionInfo):
self._connection_info = connection_info
self._session = requests.Session()
retries = Retry(total=5, backoff_factor=0.1)
self._session.mount("http://", HTTPAdapter(max_retries=retries))

def _get_headers(self) -> Dict[str, str]:
if self._connection_info.token is None:
Expand All @@ -40,11 +44,11 @@ def _get_ws_base_url(self) -> str:
return f"ws://{self._connection_info.host}:{self._connection_info.port}"

def list_kernel_specs(self) -> Dict[str, Dict[str, str]]:
response = requests.get(f"{self._get_api_base_url()}/api/kernelspecs", headers=self._get_headers())
response = self._session.get(f"{self._get_api_base_url()}/api/kernelspecs", headers=self._get_headers())
return cast(Dict[str, Dict[str, str]], response.json())

def list_kernels(self) -> List[Dict[str, str]]:
response = requests.get(f"{self._get_api_base_url()}/api/kernels", headers=self._get_headers())
response = self._session.get(f"{self._get_api_base_url()}/api/kernels", headers=self._get_headers())
return cast(List[Dict[str, str]], response.json())

def start_kernel(self, kernel_spec_name: str) -> str:
Expand All @@ -57,15 +61,21 @@ def start_kernel(self, kernel_spec_name: str) -> str:
str: ID of the started kernel
"""

response = requests.post(
response = self._session.post(
f"{self._get_api_base_url()}/api/kernels",
headers=self._get_headers(),
json={"name": kernel_spec_name},
)
return cast(str, response.json()["id"])

def delete_kernel(self, kernel_id: str) -> None:
response = self._session.delete(
f"{self._get_api_base_url()}/api/kernels/{kernel_id}", headers=self._get_headers()
)
response.raise_for_status()

def restart_kernel(self, kernel_id: str) -> None:
response = requests.post(
response = self._session.post(
f"{self._get_api_base_url()}/api/kernels/{kernel_id}/restart", headers=self._get_headers()
)
response.raise_for_status()
Expand Down Expand Up @@ -100,6 +110,9 @@ def __enter__(self) -> Self:
def __exit__(
self, exc_type: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
) -> None:
self.stop()

def stop(self) -> None:
self._websocket.close()

def _send_message(self, *, content: Dict[str, Any], channel: str, message_type: str) -> str:
Expand Down
Loading

0 comments on commit 01fc631

Please sign in to comment.