Fix Letta endpoint: disable token streaming for Letta-hosted model endpoints

This commit is contained in:
Sarah Wooders 2025-05-24 20:51:43 -07:00
parent 33cb59c261
commit 7dbd2d266c

View File

@ -716,6 +716,7 @@ async def send_message_streaming(
feature_enabled = settings.use_experimental or experimental_header.lower() == "true"
model_compatible = agent.llm_config.model_endpoint_type in ["anthropic", "openai", "together", "google_ai", "google_vertex"]
model_compatible_token_streaming = agent.llm_config.model_endpoint_type in ["anthropic", "openai"]
not_letta_endpoint = not ("letta" in agent.llm_config.model_endpoint)
if agent_eligible and feature_enabled and model_compatible:
if agent.enable_sleeptime:
@ -745,7 +746,7 @@ async def send_message_streaming(
)
from letta.server.rest_api.streaming_response import StreamingResponseWithStatusCode
if request.stream_tokens and model_compatible_token_streaming:
if request.stream_tokens and model_compatible_token_streaming and not_letta_endpoint:
result = StreamingResponseWithStatusCode(
experimental_agent.step_stream(
input_messages=request.messages,