feat: move HTML rendering of messages into LettaResponse and update notebook (#1983)
sarahwooders authored Nov 7, 2024
1 parent d9d53db commit 911db39
Showing 3 changed files with 133 additions and 22 deletions.
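The practical effect in the notebook: instead of importing `nb_print` from a local helper and calling `nb_print(response.messages)`, a cell now simply ends with the response object, and Jupyter renders it through the new `LettaResponse._repr_html_`. A rough sketch of the updated pattern, assuming the `create_client`/`send_message` client API used elsewhere in the notebook and a running Letta backend:

```python
from letta import create_client

client = create_client()             # assumes a default local Letta setup
agent_state = client.create_agent()  # agent creation arguments omitted for brevity

response = client.send_message(
    agent_id=agent_state.id,
    message="hello!",
    role="user",
)
response  # ending the cell with the object triggers LettaResponse._repr_html_
```

Outside a notebook, the existing `__str__` (a JSON dump of the messages) is unchanged, so `print(response)` behaves as before.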
39 changes: 19 additions & 20 deletions examples/Building agents with Letta.ipynb
@@ -13,16 +13,6 @@
"4. Building agentic RAG with MemGPT "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "f096bd03-9fb7-468f-af3c-24cd9e03108c",
"metadata": {},
"outputs": [],
"source": [
"from helper import nb_print"
]
},
{
"cell_type": "markdown",
"id": "aad3a8cc-d17a-4da1-b621-ecc93c9e2106",
@@ -62,9 +52,10 @@
"metadata": {},
"outputs": [],
"source": [
"from letta.schemas.llm_config import LLMConfig\n",
"from letta import LLMConfig, EmbeddingConfig\n",
"\n",
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) "
"client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
"client.set_default_embedding_config(EmbeddingConfig.default_config(provider=\"openai\")) "
]
},
{
@@ -124,7 +115,7 @@
" message=\"hello!\", \n",
" role=\"user\" \n",
")\n",
"nb_print(response.messages)"
"response"
]
},
{
@@ -257,7 +248,7 @@
" message = \"My name is actually Bob\", \n",
" role = \"user\"\n",
") \n",
"nb_print(response.messages)"
"response"
]
},
{
@@ -291,7 +282,7 @@
" message = \"In the future, never use emojis to communicate\", \n",
" role = \"user\"\n",
") \n",
"nb_print(response.messages)"
"response"
]
},
{
@@ -353,7 +344,7 @@
" message = \"Save the information that 'bob loves cats' to archival\", \n",
" role = \"user\"\n",
") \n",
"nb_print(response.messages)"
"response"
]
},
{
@@ -407,15 +398,23 @@
" role=\"user\", \n",
" message=\"What animals do I like? Search archival.\"\n",
")\n",
"nb_print(response.messages)"
"response"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "adc394c8-1d88-42bf-a6a5-b01f20f78d81",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "letta",
"display_name": "letta-main",
"language": "python",
"name": "letta"
"name": "letta-main"
},
"language_info": {
"codemirror_mode": {
@@ -427,7 +426,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
"version": "3.12.6"
}
},
"nbformat": 4,
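The other notebook change consolidates model configuration: `LLMConfig` and `EmbeddingConfig` are now imported from the top-level `letta` package, and both the LLM and embedding defaults are set on the client. A minimal sketch of that cell (the `create_client` call is assumed context from earlier in the notebook):

```python
from letta import create_client, LLMConfig, EmbeddingConfig

client = create_client()  # assumed to be the same client used throughout the notebook
client.set_default_llm_config(LLMConfig.default_config("gpt-4o-mini"))
client.set_default_embedding_config(EmbeddingConfig.default_config(provider="openai"))
```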
6 changes: 4 additions & 2 deletions letta/agent.py
@@ -248,9 +248,11 @@ def __init__(
# initialize a tool rules solver
if agent_state.tool_rules:
# if there are tool rules, print out a warning
warnings.warn("Tool rules only work reliably for the latest OpenAI models that support structured outputs.")
for rule in agent_state.tool_rules:
if not isinstance(rule, TerminalToolRule):
warnings.warn("Tool rules only work reliably for the latest OpenAI models that support structured outputs.")
break
# add default rule for having send_message be a terminal tool

if agent_state.tool_rules is None:
agent_state.tool_rules = []
# Define the rule to add
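The `agent.py` change narrows when the structured-outputs warning fires: previously it was emitted whenever any tool rules were set, now only if at least one rule is not a `TerminalToolRule`. An illustrative standalone check mirroring that logic (the `letta.schemas.tool_rule` import path and the `tool_name` field are assumptions, not shown in this diff):

```python
import warnings

from letta.schemas.tool_rule import TerminalToolRule  # assumed import path

def warn_if_non_terminal_rules(tool_rules):
    """Mirror of the updated agent.py check: warn only when a non-terminal rule exists."""
    for rule in tool_rules or []:
        if not isinstance(rule, TerminalToolRule):
            warnings.warn(
                "Tool rules only work reliably for the latest OpenAI models that support structured outputs."
            )
            break

# With only the default send_message terminal rule, no warning is emitted:
warn_if_non_terminal_rules([TerminalToolRule(tool_name="send_message")])
```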
110 changes: 110 additions & 0 deletions letta/schemas/letta_response.py
@@ -1,3 +1,6 @@
import html
import json
import re
from typing import List, Union

from pydantic import BaseModel, Field
@@ -34,6 +37,113 @@ def __str__(self):
indent=4,
)

def _repr_html_(self):
def get_formatted_content(msg):
if msg.message_type == "internal_monologue":
return f'<div class="content"><span class="internal-monologue">{html.escape(msg.internal_monologue)}</span></div>'
elif msg.message_type == "function_call":
args = format_json(msg.function_call.arguments)
return f'<div class="content"><span class="function-name">{html.escape(msg.function_call.name)}</span>({args})</div>'
elif msg.message_type == "function_return":

return_value = format_json(msg.function_return)
# return f'<div class="status-line">Status: {html.escape(msg.status)}</div><div class="content">{return_value}</div>'
return f'<div class="content">{return_value}</div>'
elif msg.message_type == "user_message":
if is_json(msg.message):
return f'<div class="content">{format_json(msg.message)}</div>'
else:
return f'<div class="content">{html.escape(msg.message)}</div>'
elif msg.message_type in ["assistant_message", "system_message"]:
return f'<div class="content">{html.escape(msg.message)}</div>'
else:
return f'<div class="content">{html.escape(str(msg))}</div>'

def is_json(string):
try:
json.loads(string)
return True
except ValueError:
return False

def format_json(json_str):
try:
parsed = json.loads(json_str)
formatted = json.dumps(parsed, indent=2, ensure_ascii=False)
formatted = formatted.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
formatted = formatted.replace("\n", "<br>").replace(" ", "&nbsp;&nbsp;")
formatted = re.sub(r'(".*?"):', r'<span class="json-key">\1</span>:', formatted)
formatted = re.sub(r': (".*?")', r': <span class="json-string">\1</span>', formatted)
formatted = re.sub(r": (\d+)", r': <span class="json-number">\1</span>', formatted)
formatted = re.sub(r": (true|false)", r': <span class="json-boolean">\1</span>', formatted)
return formatted
except json.JSONDecodeError:
return html.escape(json_str)

html_output = """
<style>
.message-container, .usage-container {
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
max-width: 800px;
margin: 20px auto;
background-color: #1e1e1e;
border-radius: 8px;
overflow: hidden;
color: #d4d4d4;
}
.message, .usage-stats {
padding: 10px 15px;
border-bottom: 1px solid #3a3a3a;
}
.message:last-child, .usage-stats:last-child {
border-bottom: none;
}
.title {
font-weight: bold;
margin-bottom: 5px;
color: #ffffff;
text-transform: uppercase;
font-size: 0.9em;
}
.content {
background-color: #2d2d2d;
border-radius: 4px;
padding: 5px 10px;
font-family: 'Consolas', 'Courier New', monospace;
white-space: pre-wrap;
}
.json-key, .function-name, .json-boolean { color: #9cdcfe; }
.json-string { color: #ce9178; }
.json-number { color: #b5cea8; }
.internal-monologue { font-style: italic; }
</style>
<div class="message-container">
"""

for msg in self.messages:
content = get_formatted_content(msg)
title = msg.message_type.replace("_", " ").upper()
html_output += f"""
<div class="message">
<div class="title">{title}</div>
{content}
</div>
"""
html_output += "</div>"

# Formatting the usage statistics
usage_html = json.dumps(self.usage.model_dump(), indent=2)
html_output += f"""
<div class="usage-container">
<div class="usage-stats">
<div class="title">USAGE STATISTICS</div>
<div class="content">{format_json(usage_html)}</div>
</div>
</div>
"""

return html_output


# The streaming response is either [DONE], [DONE_STEP], [DONE], an error, or a LettaMessage
LettaStreamingResponse = Union[LettaMessage, MessageStreamStatus, LettaUsageStatistics]

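For completeness, `_repr_html_` is the hook IPython/Jupyter calls automatically when a `LettaResponse` is the last expression in a cell; it can also be invoked explicitly. A small usage sketch (the `response` object is assumed to be an existing `LettaResponse`):

```python
from IPython.display import HTML, display

# Explicit rendering of an existing LettaResponse (same HTML Jupyter shows automatically):
display(HTML(response._repr_html_()))

# Or dump the rendered block to a file for inspection in a browser:
with open("letta_response.html", "w") as f:
    f.write(response._repr_html_())
```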