Mirror of https://github.com/cpacker/MemGPT.git
Synced 2025-06-03 04:30:22 +00:00

feat: Clean up prints (#1515)

Parent: ab710c5073
Commit: 11e235b2a9
@@ -139,7 +139,7 @@ def create_application() -> "FastAPI":

     @app.on_event("startup")
     async def configure_executor():
-        print(f"Configured event loop executor with {settings.event_loop_threadpool_max_workers} workers.")
+        print(f"INFO: Configured event loop executor with {settings.event_loop_threadpool_max_workers} workers.")
         loop = asyncio.get_running_loop()
         executor = concurrent.futures.ThreadPoolExecutor(max_workers=settings.event_loop_threadpool_max_workers)
         loop.set_default_executor(executor)
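
Note: a minimal, self-contained sketch of what the startup hook above configures. Setting the loop's default executor bounds the thread pool used by run_in_executor(None, ...) and anything built on it. MAX_WORKERS is a stand-in for settings.event_loop_threadpool_max_workers and is not part of the commit.

# Sketch only: bounded default executor for blocking work on the event loop.
import asyncio
import concurrent.futures
import time

MAX_WORKERS = 4  # hypothetical value, stands in for the settings field


async def main():
    loop = asyncio.get_running_loop()
    executor = concurrent.futures.ThreadPoolExecutor(max_workers=MAX_WORKERS)
    loop.set_default_executor(executor)

    # Blocking calls dispatched without an explicit executor now share the bounded pool.
    await asyncio.gather(*(loop.run_in_executor(None, time.sleep, 0.1) for _ in range(8)))


asyncio.run(main())
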
@@ -174,8 +174,6 @@ class AsyncToolSandboxBase(ABC):
             return None, None

         result = pickle.loads(base64.b64decode(text))
-        print("LOOK HERE!")
-        print(result)
         agent_state = result["agent_state"]
         return result["results"], agent_state

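
Note: a minimal sketch of the base64 + pickle round trip the sandbox result parsing above relies on. The payload keys ("results", "agent_state") mirror the diff; the concrete values are illustrative only.

# Sketch only: encode a result payload the way the sandbox output is decoded above.
import base64
import pickle

payload = {"results": ["ok"], "agent_state": {"id": "agent-123"}}

# Producer side: pickle the payload and base64-encode it for text transport.
text = base64.b64encode(pickle.dumps(payload)).decode("utf-8")

# Consumer side (as in AsyncToolSandboxBase): decode and unpickle.
result = pickle.loads(base64.b64decode(text))
assert result["results"] == ["ok"]
assert result["agent_state"]["id"] == "agent-123"
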
@@ -307,7 +307,7 @@ def test_multi_agent_broadcast_client(client: Letta, weather_tool):
         client.agents.delete(agent_id=worker.id)

     # create worker agents
-    num_workers = 50
+    num_workers = 10
     for idx in range(num_workers):
         client.agents.create(
             name=f"worker_{idx}",
@@ -329,6 +329,9 @@ def test_multi_agent_broadcast_client(client: Letta, weather_tool):
         )

     # send a message to the supervisor
+    import time
+
+    start = time.perf_counter()
     response = client.agents.messages.create(
         agent_id=supervisor.id,
         messages=[
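
Note: a minimal, self-contained sketch of the time.perf_counter() pattern the hunk above adds, wrapped in a reusable context manager rather than repeated inline start/end variables. The label and the sleep stand-in are illustrative, not part of the commit.

# Sketch only: reusable timing helper around a blocking call.
import time
from contextlib import contextmanager


@contextmanager
def timed(label: str):
    start = time.perf_counter()
    try:
        yield
    finally:
        end = time.perf_counter()
        print(f"{label}: {end - start:.3f}s")


with timed("broadcast round trip"):
    time.sleep(0.1)  # stand-in for client.agents.messages.create(...)
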
@@ -338,7 +341,43 @@ def test_multi_agent_broadcast_client(client: Letta, weather_tool):
             }
         ],
     )
+    end = time.perf_counter()
+    print("TIME ELAPSED: " + str(end - start))
+    for message in response.messages:
+        print(message)
+

+def test_call_weather(client: Letta, weather_tool):
+    # delete any existing worker agents
+    workers = client.agents.list(tags=["worker", "supervisor"])
+    for worker in workers:
+        client.agents.delete(agent_id=worker.id)
+
+    # create supervisor agent
+    supervisor = client.agents.create(
+        name="supervisor",
+        include_base_tools=True,
+        tool_ids=[weather_tool.id],
+        model="openai/gpt-4o",
+        embedding="letta/letta-free",
+        tags=["supervisor"],
+    )
+
+    # send a message to the supervisor
+    import time
+
+    start = time.perf_counter()
+    response = client.agents.messages.create(
+        agent_id=supervisor.id,
+        messages=[
+            {
+                "role": "user",
+                "content": "What's the weather like in Seattle?",
+            }
+        ],
+    )
+    end = time.perf_counter()
+    print("TIME ELAPSED: " + str(end - start))
     for message in response.messages:
         print(message)

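
Note: a sketch only, not part of the commit. It follows the same flow as the new test_call_weather above but asserts on the response instead of printing it, so a failure is loud rather than silent. The fixture names and all client calls are copied from the diff; the assertion message is illustrative.

# Sketch only: assertion-based variant of the weather test.
import time


def test_call_weather_asserts(client, weather_tool):
    supervisor = client.agents.create(
        name="supervisor",
        include_base_tools=True,
        tool_ids=[weather_tool.id],
        model="openai/gpt-4o",
        embedding="letta/letta-free",
        tags=["supervisor"],
    )

    start = time.perf_counter()
    response = client.agents.messages.create(
        agent_id=supervisor.id,
        messages=[{"role": "user", "content": "What's the weather like in Seattle?"}],
    )
    elapsed = time.perf_counter() - start

    assert response.messages, f"no messages returned after {elapsed:.2f}s"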