feat: move HTML rendering of messages into LettaResponse and update notebook (#1983)

Sarah Wooders 2024-11-06 19:43:56 -08:00 committed by GitHub
parent d9d53db1af
commit 911db39990
3 changed files with 133 additions and 22 deletions

File 1 of 3: tutorial notebook (.ipynb)

@@ -13,16 +13,6 @@
     "4. Building agentic RAG with MemGPT "
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "id": "f096bd03-9fb7-468f-af3c-24cd9e03108c",
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from helper import nb_print"
-   ]
-  },
   {
    "cell_type": "markdown",
    "id": "aad3a8cc-d17a-4da1-b621-ecc93c9e2106",
@@ -62,9 +52,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from letta.schemas.llm_config import LLMConfig\n",
+    "from letta import LLMConfig, EmbeddingConfig\n",
     "\n",
-    "client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) "
+    "client.set_default_llm_config(LLMConfig.default_config(\"gpt-4o-mini\")) \n",
+    "client.set_default_embedding_config(EmbeddingConfig.default_config(provider=\"openai\")) "
    ]
   },
   {
@@ -124,7 +115,7 @@
     " message=\"hello!\", \n",
     " role=\"user\" \n",
     ")\n",
-    "nb_print(response.messages)"
+    "response"
    ]
   },
   {
@@ -257,7 +248,7 @@
     " message = \"My name is actually Bob\", \n",
     " role = \"user\"\n",
     ") \n",
-    "nb_print(response.messages)"
+    "response"
    ]
   },
   {
@@ -291,7 +282,7 @@
     " message = \"In the future, never use emojis to communicate\", \n",
     " role = \"user\"\n",
     ") \n",
-    "nb_print(response.messages)"
+    "response"
    ]
   },
   {
@@ -353,7 +344,7 @@
     " message = \"Save the information that 'bob loves cats' to archival\", \n",
     " role = \"user\"\n",
     ") \n",
-    "nb_print(response.messages)"
+    "response"
    ]
   },
   {
@@ -407,15 +398,23 @@
     " role=\"user\", \n",
     " message=\"What animals do I like? Search archival.\"\n",
     ")\n",
-    "nb_print(response.messages)"
+    "response"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "adc394c8-1d88-42bf-a6a5-b01f20f78d81",
+   "metadata": {},
+   "outputs": [],
+   "source": []
   }
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "letta",
+   "display_name": "letta-main",
    "language": "python",
-   "name": "letta"
+   "name": "letta-main"
   },
   "language_info": {
    "codemirror_mode": {
@@ -427,7 +426,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.12.2"
+   "version": "3.12.6"
   }
  },
 "nbformat": 4,

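The notebook cells above now end with a bare response instead of nb_print(response.messages): Jupyter's rich-display protocol looks for a _repr_html_ method on the value of the last expression in a cell and renders whatever HTML it returns. A minimal sketch of that mechanism (the Demo class below is illustrative only, not part of letta):

from IPython.display import display

class Demo:
    """Any object exposing _repr_html_ is rendered as HTML in Jupyter."""

    def _repr_html_(self):
        return "<b>rendered as HTML, no print helper needed</b>"

demo = Demo()
display(demo)  # explicit; a bare `demo` as the last line of a cell does the same thing
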
File 2 of 3: Agent class (tool rules warning)

@@ -248,9 +248,11 @@ class Agent(BaseAgent):
         # initialize a tool rules solver
         if agent_state.tool_rules:
             # if there are tool rules, print out a warning
-            warnings.warn("Tool rules only work reliably for the latest OpenAI models that support structured outputs.")
+            for rule in agent_state.tool_rules:
+                if not isinstance(rule, TerminalToolRule):
+                    warnings.warn("Tool rules only work reliably for the latest OpenAI models that support structured outputs.")
+                    break
         # add default rule for having send_message be a terminal tool
         if agent_state.tool_rules is None:
             agent_state.tool_rules = []
         # Define the rule to add

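The agent change above narrows the startup warning: it now fires only if at least one configured rule is not a TerminalToolRule, and at most once per agent. An equivalent way to express the same check (an illustrative rewrite, not the committed code):

# Warn once if any configured rule is something other than a TerminalToolRule
if agent_state.tool_rules and any(not isinstance(rule, TerminalToolRule) for rule in agent_state.tool_rules):
    warnings.warn("Tool rules only work reliably for the latest OpenAI models that support structured outputs.")
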
File 3 of 3: LettaResponse schema (new _repr_html_ method)

@@ -1,3 +1,6 @@
+import html
+import json
+import re
 from typing import List, Union
 
 from pydantic import BaseModel, Field
@@ -34,6 +37,113 @@ class LettaResponse(BaseModel):
             indent=4,
         )
 
+    def _repr_html_(self):
+        def get_formatted_content(msg):
+            if msg.message_type == "internal_monologue":
+                return f'<div class="content"><span class="internal-monologue">{html.escape(msg.internal_monologue)}</span></div>'
+            elif msg.message_type == "function_call":
+                args = format_json(msg.function_call.arguments)
+                return f'<div class="content"><span class="function-name">{html.escape(msg.function_call.name)}</span>({args})</div>'
+            elif msg.message_type == "function_return":
+                return_value = format_json(msg.function_return)
+                # return f'<div class="status-line">Status: {html.escape(msg.status)}</div><div class="content">{return_value}</div>'
+                return f'<div class="content">{return_value}</div>'
+            elif msg.message_type == "user_message":
+                if is_json(msg.message):
+                    return f'<div class="content">{format_json(msg.message)}</div>'
+                else:
+                    return f'<div class="content">{html.escape(msg.message)}</div>'
+            elif msg.message_type in ["assistant_message", "system_message"]:
+                return f'<div class="content">{html.escape(msg.message)}</div>'
+            else:
+                return f'<div class="content">{html.escape(str(msg))}</div>'
+
+        def is_json(string):
+            try:
+                json.loads(string)
+                return True
+            except ValueError:
+                return False
+
+        def format_json(json_str):
+            try:
+                parsed = json.loads(json_str)
+                formatted = json.dumps(parsed, indent=2, ensure_ascii=False)
+                formatted = formatted.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
+                formatted = formatted.replace("\n", "<br>").replace(" ", "&nbsp;&nbsp;")
+                formatted = re.sub(r'(".*?"):', r'<span class="json-key">\1</span>:', formatted)
+                formatted = re.sub(r': (".*?")', r': <span class="json-string">\1</span>', formatted)
+                formatted = re.sub(r": (\d+)", r': <span class="json-number">\1</span>', formatted)
+                formatted = re.sub(r": (true|false)", r': <span class="json-boolean">\1</span>', formatted)
+                return formatted
+            except json.JSONDecodeError:
+                return html.escape(json_str)
+
+        html_output = """
+        <style>
+            .message-container, .usage-container {
+                font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+                max-width: 800px;
+                margin: 20px auto;
+                background-color: #1e1e1e;
+                border-radius: 8px;
+                overflow: hidden;
+                color: #d4d4d4;
+            }
+            .message, .usage-stats {
+                padding: 10px 15px;
+                border-bottom: 1px solid #3a3a3a;
+            }
+            .message:last-child, .usage-stats:last-child {
+                border-bottom: none;
+            }
+            .title {
+                font-weight: bold;
+                margin-bottom: 5px;
+                color: #ffffff;
+                text-transform: uppercase;
+                font-size: 0.9em;
+            }
+            .content {
+                background-color: #2d2d2d;
+                border-radius: 4px;
+                padding: 5px 10px;
+                font-family: 'Consolas', 'Courier New', monospace;
+                white-space: pre-wrap;
+            }
+            .json-key, .function-name, .json-boolean { color: #9cdcfe; }
+            .json-string { color: #ce9178; }
+            .json-number { color: #b5cea8; }
+            .internal-monologue { font-style: italic; }
+        </style>
+        <div class="message-container">
+        """
+
+        for msg in self.messages:
+            content = get_formatted_content(msg)
+            title = msg.message_type.replace("_", " ").upper()
+            html_output += f"""
+            <div class="message">
+                <div class="title">{title}</div>
+                {content}
+            </div>
+            """
+        html_output += "</div>"
+
+        # Formatting the usage statistics
+        usage_html = json.dumps(self.usage.model_dump(), indent=2)
+        html_output += f"""
+        <div class="usage-container">
+            <div class="usage-stats">
+                <div class="title">USAGE STATISTICS</div>
+                <div class="content">{format_json(usage_html)}</div>
+            </div>
+        </div>
+        """
+
+        return html_output
+
 
 # The streaming response is either [DONE], [DONE_STEP], [DONE], an error, or a LettaMessage
 LettaStreamingResponse = Union[LettaMessage, MessageStreamStatus, LettaUsageStatistics]
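
Because the rendering now lives on LettaResponse itself, the same HTML can be produced from any response object, inside or outside a notebook. A usage sketch, assuming response is a LettaResponse returned by a client call such as client.send_message(...):

# `response` is assumed to be a LettaResponse from a prior client call
html_str = response._repr_html_()  # the method added in this commit
with open("letta_response.html", "w") as f:
    f.write(html_str)  # open the file in a browser to inspect the dark-themed rendering

# In Jupyter the call is implicit: a bare `response` at the end of a cell
# triggers _repr_html_ automatically, which is why the notebook no longer
# imports nb_print.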