Mirror of https://github.com/cpacker/MemGPT.git (synced 2025-06-03 04:30:22 +00:00)

feat: composio async execution (#1941)

This commit is contained in:
parent 60fc1fb8cc
commit 12ff04f194
@@ -60,7 +60,7 @@ Last updated Oct 2, 2024. Please check `composio` documentation for any composio
 def main():
-    from composio_langchain import Action
+    from composio import Action

     # Add the composio tool
     tool = client.load_composio_tool(action=Action.GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER)
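For context, a minimal sketch of how a Composio tool is loaded after this change: the `Action` enum now comes from the core `composio` package instead of `composio_langchain`. The `create_client()` entry point and an already-connected GitHub account are assumptions here, not shown in this diff.

# Minimal sketch, assuming `create_client()` as in the Letta examples and a
# Composio account already connected for GitHub.
from composio import Action  # previously: from composio_langchain import Action

from letta import create_client

client = create_client()

# Load the Composio action as a Letta tool; it can then be attached to an agent.
tool = client.load_composio_tool(action=Action.GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER)
print(tool.name)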
@@ -21,8 +21,8 @@ from letta.constants import (
 )
 from letta.errors import ContextWindowExceededError
 from letta.functions.ast_parsers import coerce_dict_args_by_annotations, get_function_annotations_from_source
+from letta.functions.composio_helpers import execute_composio_action, generate_composio_action_from_func_name
 from letta.functions.functions import get_function_from_module
-from letta.functions.helpers import execute_composio_action, generate_composio_action_from_func_name
 from letta.functions.mcp_client.base_client import BaseMCPClient
 from letta.helpers import ToolRulesSolver
 from letta.helpers.composio_helpers import get_composio_api_key
@@ -179,6 +179,7 @@ class LettaAgent(BaseAgent):
                 ToolType.LETTA_SLEEPTIME_CORE,
             }
             or (t.tool_type == ToolType.LETTA_MULTI_AGENT_CORE and t.name == "send_message_to_agents_matching_tags")
+            or (t.tool_type == ToolType.EXTERNAL_COMPOSIO)
         ]

         valid_tool_names = tool_rules_solver.get_allowed_tool_names(available_tools=set([t.name for t in tools]))
@@ -331,6 +332,10 @@ class LettaAgent(BaseAgent):
                 results = await self._send_message_to_agents_matching_tags(**tool_args)
                 log_event(name="finish_send_message_to_agents_matching_tags", attributes=tool_args)
                 return json.dumps(results), True
+            elif target_tool.type == ToolType.EXTERNAL_COMPOSIO:
+                log_event(name=f"start_composio_{tool_name}_execution", attributes=tool_args)
+                log_event(name=f"finish_compsio_{tool_name}_execution", attributes=tool_args)
+                return tool_execution_result.func_return, True
             else:
                 tool_execution_manager = ToolExecutionManager(agent_state=agent_state, actor=self.actor)
                 # TODO: Integrate sandbox result
letta/functions/composio_helpers.py (new file, 100 lines)
@@ -0,0 +1,100 @@
import asyncio
import os
from typing import Any, Optional

from composio import ComposioToolSet
from composio.constants import DEFAULT_ENTITY_ID
from composio.exceptions import (
    ApiKeyNotProvidedError,
    ComposioSDKError,
    ConnectedAccountNotFoundError,
    EnumMetadataNotFound,
    EnumStringNotFound,
)

from letta.constants import COMPOSIO_ENTITY_ENV_VAR_KEY


# TODO: This is kind of hacky, as this is used to search up the action later on composio's side
# TODO: So be very careful changing/removing these pair of functions
def _generate_func_name_from_composio_action(action_name: str) -> str:
    """
    Generates the composio function name from the composio action.

    Args:
        action_name: The composio action name

    Returns:
        function name
    """
    return action_name.lower()


def generate_composio_action_from_func_name(func_name: str) -> str:
    """
    Generates the composio action from the composio function name.

    Args:
        func_name: The composio function name

    Returns:
        composio action name
    """
    return func_name.upper()


def generate_composio_tool_wrapper(action_name: str) -> tuple[str, str]:
    # Generate func name
    func_name = _generate_func_name_from_composio_action(action_name)

    wrapper_function_str = f"""\
def {func_name}(**kwargs):
    raise RuntimeError("Something went wrong - we should never be using the persisted source code for Composio. Please reach out to Letta team")
"""

    # Compile safety check
    _assert_code_gen_compilable(wrapper_function_str.strip())

    return func_name, wrapper_function_str.strip()


async def execute_composio_action_async(
    action_name: str, args: dict, api_key: Optional[str] = None, entity_id: Optional[str] = None
) -> tuple[str, str]:
    try:
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, execute_composio_action, action_name, args, api_key, entity_id)
    except Exception as e:
        raise RuntimeError(f"Error in execute_composio_action_async: {e}") from e


def execute_composio_action(action_name: str, args: dict, api_key: Optional[str] = None, entity_id: Optional[str] = None) -> Any:
    entity_id = entity_id or os.getenv(COMPOSIO_ENTITY_ENV_VAR_KEY, DEFAULT_ENTITY_ID)
    try:
        composio_toolset = ComposioToolSet(api_key=api_key, entity_id=entity_id, lock=False)
        response = composio_toolset.execute_action(action=action_name, params=args)
    except ApiKeyNotProvidedError:
        raise RuntimeError(
            f"Composio API key is missing for action '{action_name}'. "
            "Please set the sandbox environment variables either through the ADE or the API."
        )
    except ConnectedAccountNotFoundError:
        raise RuntimeError(f"No connected account was found for action '{action_name}'. " "Please link an account and try again.")
    except EnumStringNotFound as e:
        raise RuntimeError(f"Invalid value provided for action '{action_name}': " + str(e) + ". Please check the action parameters.")
    except EnumMetadataNotFound as e:
        raise RuntimeError(f"Invalid value provided for action '{action_name}': " + str(e) + ". Please check the action parameters.")
    except ComposioSDKError as e:
        raise RuntimeError(f"An unexpected error occurred in Composio SDK while executing action '{action_name}': " + str(e))

    if "error" in response and response["error"]:
        raise RuntimeError(f"Error while executing action '{action_name}': " + str(response["error"]))

    return response.get("data")


def _assert_code_gen_compilable(code_str):
    try:
        compile(code_str, "<string>", "exec")
    except SyntaxError as e:
        print(f"Syntax error in code: {e}")
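The new `execute_composio_action_async` keeps the event loop responsive by pushing the blocking Composio SDK call onto the default thread-pool executor via `run_in_executor`. A minimal sketch of how a caller awaits it; the parameters passed in `args` are illustrative, not taken from this diff.

# Minimal sketch, assuming Composio credentials are configured in the environment;
# the owner/repo parameters below are hypothetical.
import asyncio

from letta.functions.composio_helpers import execute_composio_action_async


async def main():
    data = await execute_composio_action_async(
        action_name="GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER",
        args={"owner": "letta-ai", "repo": "letta"},  # hypothetical parameters
    )
    print(data)


asyncio.run(main())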
@@ -1,8 +1,9 @@
 import importlib
 import inspect
+from collections.abc import Callable
 from textwrap import dedent  # remove indentation
 from types import ModuleType
-from typing import Dict, List, Literal, Optional
+from typing import Any, Dict, List, Literal, Optional

 from letta.errors import LettaToolCreateError
 from letta.functions.schema_generator import generate_schema
@@ -66,7 +67,8 @@ def parse_source_code(func) -> str:
     return source_code


-def get_function_from_module(module_name: str, function_name: str):
+# TODO (cliandy) refactor below two funcs
+def get_function_from_module(module_name: str, function_name: str) -> Callable[..., Any]:
     """
     Dynamically imports a function from a specified module.
@@ -6,10 +6,9 @@ from random import uniform
 from typing import Any, Dict, List, Optional, Type, Union

 import humps
-from composio.constants import DEFAULT_ENTITY_ID
 from pydantic import BaseModel, Field, create_model

-from letta.constants import COMPOSIO_ENTITY_ENV_VAR_KEY, DEFAULT_MESSAGE_TOOL, DEFAULT_MESSAGE_TOOL_KWARG
+from letta.constants import DEFAULT_MESSAGE_TOOL, DEFAULT_MESSAGE_TOOL_KWARG
 from letta.functions.interface import MultiAgentMessagingInterface
 from letta.orm.errors import NoResultFound
 from letta.schemas.enums import MessageRole
@@ -21,34 +20,6 @@ from letta.server.rest_api.utils import get_letta_server
 from letta.settings import settings


-# TODO: This is kind of hacky, as this is used to search up the action later on composio's side
-# TODO: So be very careful changing/removing these pair of functions
-def generate_func_name_from_composio_action(action_name: str) -> str:
-    """
-    Generates the composio function name from the composio action.
-
-    Args:
-        action_name: The composio action name
-
-    Returns:
-        function name
-    """
-    return action_name.lower()
-
-
-def generate_composio_action_from_func_name(func_name: str) -> str:
-    """
-    Generates the composio action from the composio function name.
-
-    Args:
-        func_name: The composio function name
-
-    Returns:
-        composio action name
-    """
-    return func_name.upper()
-
-
 # TODO needed?
 def generate_mcp_tool_wrapper(mcp_tool_name: str) -> tuple[str, str]:
@@ -58,62 +29,11 @@ def {mcp_tool_name}(**kwargs):
 """

     # Compile safety check
-    assert_code_gen_compilable(wrapper_function_str.strip())
+    _assert_code_gen_compilable(wrapper_function_str.strip())

     return mcp_tool_name, wrapper_function_str.strip()


-def generate_composio_tool_wrapper(action_name: str) -> tuple[str, str]:
-    # Generate func name
-    func_name = generate_func_name_from_composio_action(action_name)
-
-    wrapper_function_str = f"""\
-def {func_name}(**kwargs):
-    raise RuntimeError("Something went wrong - we should never be using the persisted source code for Composio. Please reach out to Letta team")
-"""
-
-    # Compile safety check
-    assert_code_gen_compilable(wrapper_function_str.strip())
-
-    return func_name, wrapper_function_str.strip()
-
-
-def execute_composio_action(action_name: str, args: dict, api_key: Optional[str] = None, entity_id: Optional[str] = None) -> Any:
-    import os
-
-    from composio.exceptions import (
-        ApiKeyNotProvidedError,
-        ComposioSDKError,
-        ConnectedAccountNotFoundError,
-        EnumMetadataNotFound,
-        EnumStringNotFound,
-    )
-    from composio_langchain import ComposioToolSet
-
-    entity_id = entity_id or os.getenv(COMPOSIO_ENTITY_ENV_VAR_KEY, DEFAULT_ENTITY_ID)
-    try:
-        composio_toolset = ComposioToolSet(api_key=api_key, entity_id=entity_id, lock=False)
-        response = composio_toolset.execute_action(action=action_name, params=args)
-    except ApiKeyNotProvidedError:
-        raise RuntimeError(
-            f"Composio API key is missing for action '{action_name}'. "
-            "Please set the sandbox environment variables either through the ADE or the API."
-        )
-    except ConnectedAccountNotFoundError:
-        raise RuntimeError(f"No connected account was found for action '{action_name}'. " "Please link an account and try again.")
-    except EnumStringNotFound as e:
-        raise RuntimeError(f"Invalid value provided for action '{action_name}': " + str(e) + ". Please check the action parameters.")
-    except EnumMetadataNotFound as e:
-        raise RuntimeError(f"Invalid value provided for action '{action_name}': " + str(e) + ". Please check the action parameters.")
-    except ComposioSDKError as e:
-        raise RuntimeError(f"An unexpected error occurred in Composio SDK while executing action '{action_name}': " + str(e))
-
-    if "error" in response:
-        raise RuntimeError(f"Error while executing action '{action_name}': " + str(response["error"]))
-
-    return response.get("data")
-
-
 def generate_langchain_tool_wrapper(
     tool: "LangChainBaseTool", additional_imports_module_attr_map: dict[str, str] = None
 ) -> tuple[str, str]:
@@ -139,12 +59,12 @@ def {func_name}(**kwargs):
 """

     # Compile safety check
-    assert_code_gen_compilable(wrapper_function_str)
+    _assert_code_gen_compilable(wrapper_function_str)

     return func_name, wrapper_function_str


-def assert_code_gen_compilable(code_str):
+def _assert_code_gen_compilable(code_str):
     try:
         compile(code_str, "<string>", "exec")
     except SyntaxError as e:
@@ -157,7 +77,7 @@ def assert_all_classes_are_imported(tool: Union["LangChainBaseTool"], additional
     current_class_imports = {tool_name}
     if additional_imports_module_attr_map:
         current_class_imports.update(set(additional_imports_module_attr_map.values()))
-    required_class_imports = set(find_required_class_names_for_import(tool))
+    required_class_imports = set(_find_required_class_names_for_import(tool))

     if not current_class_imports.issuperset(required_class_imports):
         err_msg = f"[ERROR] You are missing module_attr pairs in `additional_imports_module_attr_map`. Currently, you have imports for {current_class_imports}, but the required classes for import are {required_class_imports}"
@@ -165,7 +85,7 @@ def assert_all_classes_are_imported(tool: Union["LangChainBaseTool"], additional
         raise RuntimeError(err_msg)


-def find_required_class_names_for_import(obj: Union["LangChainBaseTool", BaseModel]) -> list[str]:
+def _find_required_class_names_for_import(obj: Union["LangChainBaseTool", BaseModel]) -> list[str]:
     """
     Finds all the class names for required imports when instantiating the `obj`.
     NOTE: This does not return the full import path, only the class name.
@@ -181,7 +101,7 @@ def find_required_class_names_for_import(obj: Union["LangChainBaseTool", BaseMod

         # Collect all possible candidates for BaseModel objects
         candidates = []
-        if is_base_model(curr_obj):
+        if _is_base_model(curr_obj):
             # If it is a base model, we get all the values of the object parameters
             # i.e., if obj('b' = <class A>), we would want to inspect <class A>
             fields = dict(curr_obj)
@@ -198,7 +118,7 @@ def find_required_class_names_for_import(obj: Union["LangChainBaseTool", BaseMod

         # Filter out all candidates that are not BaseModels
         # In the list example above, ['a', 3, None, <class A>], we want to filter out 'a', 3, and None
-        candidates = filter(lambda x: is_base_model(x), candidates)
+        candidates = filter(lambda x: _is_base_model(x), candidates)

         # Classic BFS here
         for c in candidates:
@@ -216,7 +136,7 @@ def generate_imported_tool_instantiation_call_str(obj: Any) -> Optional[str]:
         # If it is a basic Python type, we trivially return the string version of that value
         # Handle basic types
         return repr(obj)
-    elif is_base_model(obj):
+    elif _is_base_model(obj):
         # Otherwise, if it is a BaseModel
         # We want to pull out all the parameters, and reformat them into strings
         # e.g. {arg}={value}
@@ -269,7 +189,7 @@ def generate_imported_tool_instantiation_call_str(obj: Any) -> Optional[str]:
         return None


-def is_base_model(obj: Any):
+def _is_base_model(obj: Any):
     return isinstance(obj, BaseModel)

@@ -286,7 +206,7 @@ def generate_import_code(module_attr_map: Optional[dict]):
     return "\n".join(code_lines)


-def parse_letta_response_for_assistant_message(
+def _parse_letta_response_for_assistant_message(
     target_agent_id: str,
     letta_response: LettaResponse,
 ) -> Optional[str]:
@@ -346,7 +266,7 @@ def execute_send_message_to_agent(
     return asyncio.run(async_execute_send_message_to_agent(sender_agent, messages, other_agent_id, log_prefix))


-async def send_message_to_agent_no_stream(
+async def _send_message_to_agent_no_stream(
     server: "SyncServer",
     agent_id: str,
     actor: User,
@@ -389,7 +309,7 @@ async def async_send_message_with_retries(
     for attempt in range(1, max_retries + 1):
         try:
             response = await asyncio.wait_for(
-                send_message_to_agent_no_stream(
+                _send_message_to_agent_no_stream(
                     server=server,
                     agent_id=target_agent_id,
                     actor=sender_agent.user,
@@ -399,7 +319,7 @@ async def async_send_message_with_retries(
             )

             # Then parse out the assistant message
-            assistant_message = parse_letta_response_for_assistant_message(target_agent_id, response)
+            assistant_message = _parse_letta_response_for_assistant_message(target_agent_id, response)
             if assistant_message:
                 sender_agent.logger.info(f"{logging_prefix} - {assistant_message}")
                 return assistant_message
@@ -3,7 +3,7 @@ from typing import Any, Dict, Optional

 from letta.constants import COMPOSIO_ENTITY_ENV_VAR_KEY, PRE_EXECUTION_MESSAGE_ARG
 from letta.functions.ast_parsers import coerce_dict_args_by_annotations, get_function_annotations_from_source
-from letta.functions.helpers import execute_composio_action, generate_composio_action_from_func_name
+from letta.functions.composio_helpers import execute_composio_action, generate_composio_action_from_func_name
 from letta.helpers.composio_helpers import get_composio_api_key
 from letta.orm.enums import ToolType
 from letta.schemas.agent import AgentState
@@ -11,13 +11,9 @@ from letta.constants import (
     MCP_TOOL_TAG_NAME_PREFIX,
 )
 from letta.functions.ast_parsers import get_function_name_and_description
+from letta.functions.composio_helpers import generate_composio_tool_wrapper
 from letta.functions.functions import derive_openai_json_schema, get_json_schema_from_module
-from letta.functions.helpers import (
-    generate_composio_tool_wrapper,
-    generate_langchain_tool_wrapper,
-    generate_mcp_tool_wrapper,
-    generate_model_from_args_json_schema,
-)
+from letta.functions.helpers import generate_langchain_tool_wrapper, generate_mcp_tool_wrapper, generate_model_from_args_json_schema
 from letta.functions.mcp_client.types import MCPTool
 from letta.functions.schema_generator import (
     generate_schema_from_args_schema_v2,
@@ -176,8 +172,7 @@ class ToolCreate(LettaBase):
         Returns:
             Tool: A Letta Tool initialized with attributes derived from the Composio tool.
         """
-        from composio import LogLevel
-        from composio_langchain import ComposioToolSet
+        from composio import ComposioToolSet, LogLevel

         composio_toolset = ComposioToolSet(logging_level=LogLevel.ERROR, lock=False)
         composio_action_schemas = composio_toolset.get_action_schemas(actions=[action_name], check_connected_accounts=False)
@@ -100,7 +100,7 @@ class ToolExecutionManager:
         try:
             executor = ToolExecutorFactory.get_executor(tool.tool_type)
             # TODO: Extend this async model to composio
-            if isinstance(executor, SandboxToolExecutor):
+            if isinstance(executor, (SandboxToolExecutor, ExternalComposioToolExecutor)):
                 result = await executor.execute(function_name, function_args, self.agent_state, tool, self.actor)
             else:
                 result = executor.execute(function_name, function_args, self.agent_state, tool, self.actor)
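The change above routes `ExternalComposioToolExecutor` through the same awaited path already used for sandboxed tools, while other executors remain synchronous. A self-contained sketch of that isinstance-based dispatch pattern, using simplified stand-in classes rather than the Letta implementations:

# Minimal sketch of sync/async executor dispatch; SyncExecutor and AsyncExecutor
# are stand-ins, not Letta classes.
import asyncio


class SyncExecutor:
    def execute(self, x):
        return x * 2


class AsyncExecutor:
    async def execute(self, x):
        await asyncio.sleep(0)  # stand-in for awaiting real I/O
        return x * 2


async def run(executor, x):
    # Await only the executors known to expose a coroutine-based execute().
    if isinstance(executor, AsyncExecutor):
        return await executor.execute(x)
    return executor.execute(x)


print(asyncio.run(run(AsyncExecutor(), 21)))  # 42
print(asyncio.run(run(SyncExecutor(), 21)))   # 42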
@@ -5,7 +5,7 @@ from typing import Any, Dict, Optional

 from letta.constants import COMPOSIO_ENTITY_ENV_VAR_KEY, CORE_MEMORY_LINE_NUMBER_WARNING, RETRIEVAL_QUERY_DEFAULT_PAGE_SIZE
 from letta.functions.ast_parsers import coerce_dict_args_by_annotations, get_function_annotations_from_source
-from letta.functions.helpers import execute_composio_action, generate_composio_action_from_func_name
+from letta.functions.composio_helpers import execute_composio_action_async, generate_composio_action_from_func_name
 from letta.helpers.composio_helpers import get_composio_api_key
 from letta.helpers.json_helpers import json_dumps
 from letta.schemas.agent import AgentState
@@ -486,7 +486,7 @@ class LettaMultiAgentToolExecutor(ToolExecutor):
 class ExternalComposioToolExecutor(ToolExecutor):
     """Executor for external Composio tools."""

-    def execute(
+    async def execute(
         self,
         function_name: str,
         function_args: dict,
@@ -505,7 +505,7 @@ class ExternalComposioToolExecutor(ToolExecutor):
         composio_api_key = get_composio_api_key(actor=actor)

         # TODO (matt): Roll in execute_composio_action into this class
-        function_response = execute_composio_action(
+        function_response = await execute_composio_action_async(
             action_name=action_name, args=function_args, api_key=composio_api_key, entity_id=entity_id
         )
poetry.lock (generated; 104 lines changed)
@@ -1016,25 +1016,6 @@ e2b = ["e2b (>=0.17.2a37,<1.1.0)", "e2b-code-interpreter"]
 flyio = ["gql", "requests_toolbelt"]
 tools = ["diskcache", "flake8", "networkx", "pathspec", "pygments", "ruff", "transformers"]

-[[package]]
-name = "composio-langchain"
-version = "0.7.15"
-description = "Use Composio to get an array of tools with your LangChain agent."
-optional = false
-python-versions = "<4,>=3.9"
-groups = ["main"]
-files = [
-    {file = "composio_langchain-0.7.15-py3-none-any.whl", hash = "sha256:a71b5371ad6c3ee4d4289c7a994fad1424e24c29a38e820b6b2ed259056abb65"},
-    {file = "composio_langchain-0.7.15.tar.gz", hash = "sha256:cb75c460289ecdf9590caf7ddc0d7888b0a6622ca4f800c9358abe90c25d055e"},
-]
-
-[package.dependencies]
-composio_core = ">=0.7.0,<0.8.0"
-langchain = ">=0.1.0"
-langchain-openai = ">=0.0.2.post1"
-langchainhub = ">=0.1.15"
-pydantic = ">=2.6.4"
-
 [[package]]
 name = "configargparse"
 version = "1.7"
@@ -2842,9 +2823,10 @@ files = [
 name = "jsonpatch"
 version = "1.33"
 description = "Apply JSON-Patches (RFC 6902)"
-optional = false
+optional = true
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"},
     {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"},
@@ -2857,9 +2839,10 @@ jsonpointer = ">=1.9"
 name = "jsonpointer"
 version = "3.0.0"
 description = "Identify specific nodes in a JSON document (RFC 6901)"
-optional = false
+optional = true
 python-versions = ">=3.7"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
     {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
@@ -3052,9 +3035,10 @@ files = [
 name = "langchain"
 version = "0.3.23"
 description = "Building applications with LLMs through composability"
-optional = false
+optional = true
 python-versions = "<4.0,>=3.9"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "langchain-0.3.23-py3-none-any.whl", hash = "sha256:084f05ee7e80b7c3f378ebadd7309f2a37868ce2906fa0ae64365a67843ade3d"},
     {file = "langchain-0.3.23.tar.gz", hash = "sha256:d95004afe8abebb52d51d6026270248da3f4b53d93e9bf699f76005e0c83ad34"},
@@ -3120,9 +3104,10 @@ tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10"
 name = "langchain-core"
 version = "0.3.51"
 description = "Building applications with LLMs through composability"
-optional = false
+optional = true
 python-versions = "<4.0,>=3.9"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "langchain_core-0.3.51-py3-none-any.whl", hash = "sha256:4bd71e8acd45362aa428953f2a91d8162318014544a2216e4b769463caf68e13"},
     {file = "langchain_core-0.3.51.tar.gz", hash = "sha256:db76b9cc331411602cb40ba0469a161febe7a0663fbcaddbc9056046ac2d22f4"},
@@ -3140,30 +3125,14 @@ PyYAML = ">=5.3"
 tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0"
 typing-extensions = ">=4.7"

-[[package]]
-name = "langchain-openai"
-version = "0.3.12"
-description = "An integration package connecting OpenAI and LangChain"
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "langchain_openai-0.3.12-py3-none-any.whl", hash = "sha256:0fab64d58ec95e65ffbaf659470cd362e815685e15edbcb171641e90eca4eb86"},
-    {file = "langchain_openai-0.3.12.tar.gz", hash = "sha256:c9dbff63551f6bd91913bca9f99a2d057fd95dc58d4778657d67e5baa1737f61"},
-]
-
-[package.dependencies]
-langchain-core = ">=0.3.49,<1.0.0"
-openai = ">=1.68.2,<2.0.0"
-tiktoken = ">=0.7,<1"
-
 [[package]]
 name = "langchain-text-splitters"
 version = "0.3.8"
 description = "LangChain text splitting utilities"
-optional = false
+optional = true
 python-versions = "<4.0,>=3.9"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "langchain_text_splitters-0.3.8-py3-none-any.whl", hash = "sha256:e75cc0f4ae58dcf07d9f18776400cf8ade27fadd4ff6d264df6278bb302f6f02"},
     {file = "langchain_text_splitters-0.3.8.tar.gz", hash = "sha256:116d4b9f2a22dda357d0b79e30acf005c5518177971c66a9f1ab0edfdb0f912e"},
@@ -3172,30 +3141,14 @@ files = [
 [package.dependencies]
 langchain-core = ">=0.3.51,<1.0.0"

-[[package]]
-name = "langchainhub"
-version = "0.1.21"
-description = "The LangChain Hub API client"
-optional = false
-python-versions = "<4.0,>=3.8.1"
-groups = ["main"]
-files = [
-    {file = "langchainhub-0.1.21-py3-none-any.whl", hash = "sha256:1cc002dc31e0d132a776afd044361e2b698743df5202618cf2bad399246b895f"},
-    {file = "langchainhub-0.1.21.tar.gz", hash = "sha256:723383b3964a47dbaea6ad5d0ef728accefbc9d2c07480e800bdec43510a8c10"},
-]
-
-[package.dependencies]
-packaging = ">=23.2,<25"
-requests = ">=2,<3"
-types-requests = ">=2.31.0.2,<3.0.0.0"
-
 [[package]]
 name = "langsmith"
 version = "0.3.28"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
-optional = false
+optional = true
 python-versions = "<4.0,>=3.9"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "langsmith-0.3.28-py3-none-any.whl", hash = "sha256:54ac8815514af52d9c801ad7970086693667e266bf1db90fc453c1759e8407cd"},
     {file = "langsmith-0.3.28.tar.gz", hash = "sha256:4666595207131d7f8d83418e54dc86c05e28562e5c997633e7c33fc18f9aeb89"},
@@ -3221,14 +3174,14 @@ pytest = ["pytest (>=7.0.0)", "rich (>=13.9.4,<14.0.0)"]

 [[package]]
 name = "letta-client"
-version = "0.1.124"
+version = "0.1.129"
 description = ""
 optional = false
 python-versions = "<4.0,>=3.8"
 groups = ["main"]
 files = [
-    {file = "letta_client-0.1.124-py3-none-any.whl", hash = "sha256:a7901437ef91f395cd85d24c0312046b7c82e5a4dd8e04de0d39b5ca085c65d3"},
-    {file = "letta_client-0.1.124.tar.gz", hash = "sha256:e8b5716930824cc98c62ee01343e358f88619d346578d48a466277bc8282036d"},
+    {file = "letta_client-0.1.129-py3-none-any.whl", hash = "sha256:87a5fc32471e5b9fefbfc1e1337fd667d5e2e340ece5d2a6c782afbceab4bf36"},
+    {file = "letta_client-0.1.129.tar.gz", hash = "sha256:b00f611c18a2ad802ec9265f384e1666938c5fc5c86364b2c410d72f0331d597"},
 ]

 [package.dependencies]
@@ -4366,10 +4319,10 @@ files = [
 name = "orjson"
 version = "3.10.16"
 description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
-optional = false
+optional = true
 python-versions = ">=3.9"
 groups = ["main"]
-markers = "platform_python_implementation != \"PyPy\""
+markers = "platform_python_implementation != \"PyPy\" and (extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\")"
 files = [
     {file = "orjson-3.10.16-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:4cb473b8e79154fa778fb56d2d73763d977be3dcc140587e07dbc545bbfc38f8"},
     {file = "orjson-3.10.16-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:622a8e85eeec1948690409a19ca1c7d9fd8ff116f4861d261e6ae2094fe59a00"},
@@ -6069,9 +6022,10 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
 name = "requests-toolbelt"
 version = "1.0.0"
 description = "A utility belt for advanced users of python-requests"
-optional = false
+optional = true
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
     {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
@@ -6855,21 +6809,6 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
 doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
 test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]

-[[package]]
-name = "types-requests"
-version = "2.32.0.20250328"
-description = "Typing stubs for requests"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"},
-    {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"},
-]
-
-[package.dependencies]
-urllib3 = ">=2"
-
 [[package]]
 name = "typing-extensions"
 version = "4.13.2"
@@ -7438,9 +7377,10 @@ testing = ["coverage[toml]", "zope.event", "zope.testing"]
 name = "zstandard"
 version = "0.23.0"
 description = "Zstandard bindings for Python"
-optional = false
+optional = true
 python-versions = ">=3.8"
 groups = ["main"]
+markers = "extra == \"external-tools\" or extra == \"desktop\" or extra == \"all\""
 files = [
     {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"},
     {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"},
@@ -7563,4 +7503,4 @@ tests = ["wikipedia"]
 [metadata]
 lock-version = "2.1"
 python-versions = "<3.14,>=3.10"
-content-hash = "75c1c949aa6c0ef8d681bddd91999f97ed4991451be93ca45bf9c01dd19d8a8a"
+content-hash = "ba9cf0e00af2d5542aa4beecbd727af92b77ba584033f05c222b00ae47f96585"
@@ -56,7 +56,6 @@ nltk = "^3.8.1"
 jinja2 = "^3.1.5"
 locust = {version = "^2.31.5", optional = true}
 wikipedia = {version = "^1.4.0", optional = true}
-composio-langchain = "^0.7.7"
 composio-core = "^0.7.7"
 alembic = "^1.13.3"
 pyhumps = "^3.8.0"
@@ -74,7 +73,7 @@ llama-index = "^0.12.2"
 llama-index-embeddings-openai = "^0.3.1"
 e2b-code-interpreter = {version = "^1.0.3", optional = true}
 anthropic = "^0.49.0"
-letta_client = "^0.1.124"
+letta_client = "^0.1.127"
 openai = "^1.60.0"
 opentelemetry-api = "1.30.0"
 opentelemetry-sdk = "1.30.0"
@@ -179,7 +179,7 @@ def check_agent_uses_external_tool(filename: str) -> LettaResponse:

     Note: This is acting on the Letta response, note the usage of `user_message`
     """
-    from composio_langchain import Action
+    from composio import Action

     # Set up client
     client = create_client()
@@ -56,7 +56,7 @@ def test_add_composio_tool(fastapi_client):
     assert "name" in response.json()


-def test_composio_tool_execution_e2e(check_composio_key_set, composio_get_emojis, server: SyncServer, default_user):
+async def test_composio_tool_execution_e2e(check_composio_key_set, composio_get_emojis, server: SyncServer, default_user):
     agent_state = server.agent_manager.create_agent(
         agent_create=CreateAgent(
             name="sarah_agent",
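Turning this test into a coroutine only works if the test runner collects async tests, for example via a plugin such as pytest-asyncio or anyio; which one this repository configures is not shown in the diff. A minimal sketch of that pattern, assuming pytest-asyncio:

# Minimal sketch, assuming pytest-asyncio is installed and configured;
# the awaited helper is a stand-in, not the Letta ToolExecutionManager.
import asyncio

import pytest


@pytest.mark.asyncio
async def test_awaits_an_async_tool_call():
    async def fake_execute_tool():
        await asyncio.sleep(0)  # stand-in for the real awaited execution
        return "ok"

    assert await fake_execute_tool() == "ok"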
@@ -67,7 +67,7 @@ def test_composio_tool_execution_e2e(check_composio_key_set, composio_get_emojis
         actor=default_user,
     )

-    tool_execution_result = ToolExecutionManager(agent_state, actor=default_user).execute_tool(
+    tool_execution_result = await ToolExecutionManager(agent_state, actor=default_user).execute_tool(
         function_name=composio_get_emojis.name, function_args={}, tool=composio_get_emojis
     )
@@ -124,7 +124,7 @@ def test_agent(client: LocalClient):
 def test_agent_add_remove_tools(client: LocalClient, agent):
     # Create and add two tools to the client
     # tool 1
-    from composio_langchain import Action
+    from composio import Action

     github_tool = client.load_composio_tool(action=Action.GITHUB_STAR_A_REPOSITORY_FOR_THE_AUTHENTICATED_USER)
@@ -316,7 +316,7 @@ def test_tools(client: LocalClient):


 def test_tools_from_composio_basic(client: LocalClient):
-    from composio_langchain import Action
+    from composio import Action

     # Create a `LocalClient` (you can also use a `RESTClient`, see the letta_rest_client.py example)
     client = create_client()