Mirror of https://github.com/cpacker/MemGPT.git (synced 2025-06-03 04:30:22 +00:00)
fix: anthropic streaming (#1768)

Co-authored-by: Kevin Lin <klin5061@gmail.com>

parent 4d38ea11fa
commit 2ed0c93d07
@@ -619,7 +619,7 @@ class Message(BaseMessage):
             text_content = self.content[0].text
         # Otherwise, check if we have TextContent and multiple other parts
         elif self.content and len(self.content) > 1:
-            text = [content for content in self.content if isinstance(self.content[0], TextContent)]
+            text = [content for content in self.content if isinstance(content, TextContent)]
             if len(text) > 1:
                 assert len(text) == 1, f"multiple text content parts found in a single message: {self.content}"
             text_content = text[0].text
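Reviewer note on the hunk above: the old comprehension tests isinstance(self.content[0], TextContent), which ignores the loop variable, so whenever the first part is text every part survives the filter, text or not. A minimal, self-contained sketch of the difference, using hypothetical stand-in classes rather than letta's real content-part models:

from dataclasses import dataclass

# Hypothetical stand-ins for letta's content-part types (illustration only).
@dataclass
class TextContent:
    text: str

@dataclass
class ImageContent:
    url: str

content = [TextContent(text="hello"), ImageContent(url="https://example.com/cat.png")]

# Old predicate: checks the first element on every iteration, so the image part
# slips through as long as content[0] is a TextContent.
old = [part for part in content if isinstance(content[0], TextContent)]
assert len(old) == 2

# Fixed predicate: checks each part itself.
new = [part for part in content if isinstance(part, TextContent)]
assert len(new) == 1 and new[0].text == "hello"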
@@ -6,6 +6,8 @@ from collections import deque
 from datetime import datetime
 from typing import AsyncGenerator, Literal, Optional, Union
 
+import demjson3 as demjson
+
 from letta.constants import DEFAULT_MESSAGE_TOOL, DEFAULT_MESSAGE_TOOL_KWARG
 from letta.helpers.datetime_helpers import is_utc_datetime
 from letta.interface import AgentInterface
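Reviewer note on the new import: the reasoning-content buffer is parsed with parse_json, and the except clause added further down implies that parser can surface demjson3's own decode error. Below is a minimal sketch of that kind of strict-then-lenient fallback, assuming (not confirmed here) that parse_json tries the stdlib first and demjson3 second; parse_json_lenient is an illustrative name, not letta's helper:

import json

import demjson3 as demjson

def parse_json_lenient(raw: str):
    """Strict stdlib parse first, then demjson3's more forgiving decoder."""
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        # demjson3 tolerates some non-standard JSON; when it still cannot parse
        # the input it raises demjson3.JSONDecodeError, a different exception
        # type from json.JSONDecodeError.
        return demjson.decode(raw)

print(parse_json_lenient('{"send_message": {"message": "hi"}}'))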
@@ -530,7 +532,6 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                 try:
                     # NOTE: this is hardcoded for our DeepSeek API integration
                     json_reasoning_content = parse_json(self.expect_reasoning_content_buffer)
-                    print(f"json_reasoning_content: {json_reasoning_content}")
 
                     processed_chunk = ToolCallMessage(
                         id=message_id,
@@ -547,6 +548,10 @@ class StreamingServerInterface(AgentChunkStreamingInterface):
                 except json.JSONDecodeError as e:
                     print(f"Failed to interpret reasoning content ({self.expect_reasoning_content_buffer}) as JSON: {e}")
 
+                    return None
+                except demjson.JSONDecodeError as e:
+                    print(f"Failed to interpret reasoning content ({self.expect_reasoning_content_buffer}) as JSON: {e}")
+
                     return None
             # Else,
             # return None
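Reviewer note on the added except clause: demjson3 raises its own JSONDecodeError class, unrelated to the stdlib's json.JSONDecodeError, so the original handler alone would let a fallback-parse failure escape the streaming path. A small standalone check of that distinction, assuming demjson3 is installed:

import json

import demjson3 as demjson

# The two error types are unrelated classes, so `except json.JSONDecodeError`
# by itself does not catch failures raised by demjson3.
print(issubclass(demjson.JSONDecodeError, json.JSONDecodeError))  # expected: False

try:
    demjson.decode("{ this is not json")
except json.JSONDecodeError:
    print("stdlib handler")  # not expected to run
except demjson.JSONDecodeError as e:
    print(f"demjson3 handler: {e}")  # expected branch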