Multi agent support #102

Merged (8 commits, Mar 11, 2024)
Changes from 4 commits
21 changes: 21 additions & 0 deletions agentops/agent.py
@@ -0,0 +1,21 @@
import uuid
from agentops import Client


def track_agent(name: str | None):
    def class_decorator(cls):
        cls._is_ao_agent = True
        cls._ao_agent_name = name or cls.__name__

        original_init = cls.__init__

        def new_init(self, *args, **kwargs):
            self._ao_agent_id = str(uuid.uuid4())
            ao_client = Client()
            ao_client.create_agent(self._ao_agent_id, self._ao_agent_name)
            original_init(self, *args, **kwargs)

        cls.__init__ = new_init
        return cls

    return class_decorator
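To make the intent of `track_agent` concrete, here is a minimal usage sketch (the class name, constructor argument, and prints are illustrative and not from this PR; it also assumes an AgentOps client with an API key and an active session already exists, so the `create_agent` call inside `new_init` can reach the worker):

```python
from agentops.agent import track_agent


@track_agent(name="ResearchAgent")
class ResearchAgent:
    def __init__(self, topic: str):
        self.topic = topic  # the original __init__ still runs after registration


agent = ResearchAgent("multi-agent coordination")
print(agent._ao_agent_name)  # "ResearchAgent"
print(agent._ao_agent_id)    # a freshly generated UUID string
```

Passing `name=None` falls back to the class name via `name or cls.__name__`.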
13 changes: 9 additions & 4 deletions agentops/client.py
@@ -6,7 +6,7 @@
"""

from .event import Event
-from .helpers import get_ISO_time
+from .helpers import get_ISO_time, singleton
from .session import Session
from .worker import Worker
from .host_env import get_host_env
@@ -22,12 +22,13 @@
import signal
import sys

-from .developer_errors import ExceptionHandlerMeta
+from .meta_client import MetaClient
from .config import Configuration
from .llm_tracker import LlmTracker


-class Client(metaclass=ExceptionHandlerMeta):
+@singleton
+class Client(metaclass=MetaClient):
"""
Client for AgentOps service.

@@ -60,7 +61,8 @@ def __init__(self, api_key: Optional[str] = None,
self.config = None

if not api_key and not environ.get('AGENTOPS_API_KEY'):
-return logging.warn("AgentOps: No API key provided - no data will be recorded.")
+logging.warning("AgentOps: No API key provided - no data will be recorded.")
+return

self.config = Configuration(api_key or environ.get('AGENTOPS_API_KEY'),
endpoint,
@@ -269,6 +271,9 @@ def end_session(self, end_state: str = Field("Indeterminate",
self._session = None
self._worker = None

def create_agent(self, agent_id: str, name: str):
    self._worker.create_agent(agent_id, name)

def _handle_unclean_exits(self):
def cleanup(end_state_reason: Optional[str] = None):
# Only run cleanup function if session is created
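Why `@singleton` matters for the decorator path, as a rough sketch (the API key and agent name below are placeholders; an active session is assumed before `create_agent` is called, since it delegates to `self._worker`):

```python
import uuid

from agentops import Client

client = Client(api_key="<your-api-key>")  # first call configures the shared instance
assert Client() is client                  # later bare Client() calls return the same object

# agent.py relies on this: new_init can call Client() and get the already-configured
# client back, then hand the new agent off to the background worker.
client.create_agent(str(uuid.uuid4()), "ResearchAgent")
```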
12 changes: 12 additions & 0 deletions agentops/helpers.py
@@ -4,6 +4,17 @@
from packaging.version import parse


def singleton(class_):
    instances = {}

    def getinstance(*args, **kwargs):
        if class_ not in instances:
            instances[class_] = class_(*args, **kwargs)
        return instances[class_]

    return getinstance


def get_ISO_time():
"""
Get the current UTC time in ISO 8601 format with milliseconds precision, suffixed with 'Z' to denote UTC timezone.
@@ -42,4 +53,5 @@ def default(o):
return o.to_json()
else:
return f"<<non-serializable: {type(o).__qualname__}>>"

return json.dumps(obj, default=default)
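The behavior of the new `singleton` helper in isolation looks like this (the `Counter` class is purely illustrative):

```python
from agentops.helpers import singleton


@singleton
class Counter:
    def __init__(self):
        self.value = 0


a = Counter()
b = Counter()
a.value += 1
assert a is b        # every call returns the same cached instance
assert b.value == 1  # state is shared because only one object ever exists
```

One side effect worth noting: the decorator replaces the class with a factory function, so `isinstance(a, Counter)` and subclassing the decorated name no longer work.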
2 changes: 1 addition & 1 deletion agentops/host_env.py
@@ -16,7 +16,7 @@ def get_cpu_details():
return {
"Physical cores": psutil.cpu_count(logical=False),
"Total cores": psutil.cpu_count(logical=True),
"Max Frequency": f"{psutil.cpu_freq().max:.2f}Mhz",
# "Max Frequency": f"{psutil.cpu_freq().max:.2f}Mhz", # Fails right now
"CPU Usage": f"{psutil.cpu_percent()}%"
}

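If the Max Frequency field is wanted back later, one possible approach (untested, just a sketch) is to guard the call that currently fails rather than dropping the field:

```python
import psutil

try:
    max_frequency = f"{psutil.cpu_freq().max:.2f}Mhz"
except Exception:
    max_frequency = "Unavailable"  # psutil.cpu_freq() raises on some platforms/containers
```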
7 changes: 4 additions & 3 deletions agentops/http_client.py
@@ -92,12 +92,13 @@ def post(url: str, payload: bytes, api_key: Optional[str] = None, header=None) -
result.body = {'error': str(e)}

if result.code == 401:
-logging.warn(
+logging.warning(
'AgentOps: Could not post data - API server rejected your API key')
if result.code == 400:
-logging.warn(f'AgentOps: Could not post data - {result.body}')
+logging.warning(f'AgentOps: Could not post data - {result.body}')
if result.code == 500:
-logging.warn(
+logging.warning(
f'AgentOps: Could not post data - internal server error')
print(result.body)

return result
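For context on the warn-to-warning change: `logging.warn` is a deprecated alias that emits a `DeprecationWarning` on current Python versions, so `logging.warning` is the supported spelling:

```python
import logging

logging.warning("AgentOps: example message")  # supported
# logging.warn("AgentOps: example message")   # deprecated alias, avoid
```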
32 changes: 28 additions & 4 deletions agentops/llm_tracker.py
@@ -3,10 +3,27 @@
import sys
from importlib import import_module
from packaging.version import parse
import logging
from .event import Event
from .helpers import get_ISO_time


def check_call_stack_for_agent_id() -> str | None:
    # only looks at the last 2 frames otherwise it will retrieve
    # whatever agent class was used last
    for frame_info in inspect.stack():
        # Get the locals from the current frame
        local_vars = frame_info.frame.f_locals
        for var in local_vars.values():
            if var == "__main__":
                return
            # Instead of checking if var is a class, check if it's an instance with the _is_ao_agent attribute
            if hasattr(var, '_is_ao_agent') and getattr(var, '_is_ao_agent'):
                logging.info('LLM call from agent named: ' + getattr(var, '_ao_agent_name'))
                return getattr(var, '_ao_agent_id')
    return None


class LlmTracker:
SUPPORTED_APIS = {
'openai': {
@@ -36,8 +53,10 @@ def handle_stream_chunk(chunk):
finish_reason = choices[0]['finish_reason']

if self.event_stream == None:
agent_id = check_call_stack_for_agent_id()
self.event_stream = Event(
event_type='openai stream',
agent_id=agent_id,
params=kwargs,
result='Success',
returns={"finish_reason": None,
@@ -59,7 +78,7 @@ def handle_stream_chunk(chunk):
self.client.record(self.event_stream)
self.event_stream = None
except Exception as e:
-print(
+logging.info(
f"Unable to parse a chunk for LLM call {kwargs} - skipping upload to AgentOps")

# if the response is a generator, decorate the generator
@@ -81,6 +100,7 @@ def generator():

# v0.0.0 responses are dicts
try:
agent_id = check_call_stack_for_agent_id()
self.client.record(Event(
event_type=response['object'],
params=kwargs,
@@ -115,7 +135,7 @@ def generator():
))
# Standard response
except Exception as e:
-print(
+logging.error(
f"Unable to parse a chunk for LLM call {kwargs} - skipping upload to AgentOps")

return response
@@ -137,8 +157,10 @@ def handle_stream_chunk(chunk: ChatCompletionChunk):
role = choices[0].delta.role

if self.event_stream == None:
agent_id = check_call_stack_for_agent_id()
self.event_stream = Event(
event_type='openai chat completion stream',
agent_id=agent_id,
params=kwargs,
result='Success',
returns={"finish_reason": None,
@@ -165,7 +187,7 @@ def handle_stream_chunk(chunk: ChatCompletionChunk):
self.client.record(self.event_stream)
self.event_stream = None
except Exception as e:
-print(
+logging.error(
f"Unable to parse a chunk for LLM call {kwargs} - skipping upload to AgentOps")

# if the response is a generator, decorate the generator
@@ -194,8 +216,10 @@ async def async_generator():

# v1.0.0+ responses are objects
try:
agent_id = check_call_stack_for_agent_id()
self.client.record(Event(
event_type=response.object,
agent_id=agent_id,
params=kwargs,
result='Success',
returns={
Expand All @@ -210,7 +234,7 @@ async def async_generator():
))
# Standard response
except Exception as e:
-print(
+logging.error(
f"Unable to parse a chunk for LLM call {kwargs} - skipping upload to AgentOps")

return response
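To illustrate how the stack walk ties an LLM call back to an agent, here is a simplified, self-contained sketch of the same idea (the function and class names are stand-ins; in the PR the walk runs inside the patched completion handlers):

```python
import inspect


def find_agent_id():
    # Walk the call stack and look for any local object carrying the
    # _is_ao_agent marker that track_agent sets on decorated classes.
    for frame_info in inspect.stack():
        for var in frame_info.frame.f_locals.values():
            if getattr(var, "_is_ao_agent", False):
                return var._ao_agent_id
    return None


class FakeAgent:
    _is_ao_agent = True
    _ao_agent_id = "agent-123"

    def ask(self):
        # `self` is a local in this frame, so the walk above finds it.
        return find_agent_id()


assert FakeAgent().ask() == "agent-123"
```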
2 changes: 1 addition & 1 deletion agentops/developer_errors.py → agentops/meta_client.py
@@ -7,7 +7,7 @@
from .helpers import safe_serialize


-class ExceptionHandlerMeta(type):
+class MetaClient(type):
"""Metaclass to automatically decorate methods with exception handling and provide a shared exception handler."""

def __new__(cls, name, bases, dct):
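For reviewers unfamiliar with the renamed module: the docstring describes a metaclass that wraps methods with a shared exception handler. A generic sketch of that pattern (not the actual `MetaClient` body, which is unchanged and outside this diff) looks roughly like:

```python
import functools


class ExceptionSafeMeta(type):
    """Wrap public methods so exceptions reach a shared handler instead of raising."""

    def __new__(mcs, name, bases, dct):
        for attr, value in list(dct.items()):
            if callable(value) and not attr.startswith("__"):
                dct[attr] = mcs._wrap(value)
        return super().__new__(mcs, name, bases, dct)

    @staticmethod
    def _wrap(method):
        @functools.wraps(method)
        def safe(*args, **kwargs):
            try:
                return method(*args, **kwargs)
            except Exception as e:
                print(f"handled error from {method.__name__}: {e}")  # stand-in for the shared handler

        return safe


class Demo(metaclass=ExceptionSafeMeta):
    def boom(self):
        raise ValueError("oops")


Demo().boom()  # prints the handled error instead of raising
```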
13 changes: 13 additions & 0 deletions agentops/worker.py
@@ -82,6 +82,19 @@ def update_session(self, session: Session) -> None:
self.config.api_key,
self.config.org_key)

def create_agent(self, agent_id, name):
    payload = {
        "id": agent_id,
        "name": name,
        "session_id": self._session.session_id
    }

    serialized_payload = \
        safe_serialize(payload).encode("utf-8")
    HttpClient.post(f'{self.config.endpoint}/agents',
                    serialized_payload,
                    self.config.api_key)

def run(self) -> None:
while not self.stop_flag.is_set():
time.sleep(self.config.max_wait_time / 1000)
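For reference, the request body the worker sends to the new `/agents` route looks like this once serialized (the UUIDs and name are placeholders):

```python
# Serialized with safe_serialize(payload).encode("utf-8") and sent via
# HttpClient.post(f"{endpoint}/agents", serialized_payload, api_key):
payload = {
    "id": "6f9c1c3e-8a6e-4b8e-9a57-4a2f9d3f0c11",         # agent UUID from track_agent
    "name": "ResearchAgent",                               # decorator name or class name
    "session_id": "e0b5d1a2-7c44-4b6b-8f43-2d9a1c5e7f90"   # id of the active session
}
```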