feat: Migrating CLI to run on MemGPT Client for memgpt [list/add/delete] (#1428) (#1449)

Co-authored-by: Krishna Mandal <43015249+KrishnaM251@users.noreply.github.com>
This commit is contained in:
Sarah Wooders 2024-06-13 16:02:59 -07:00 committed by GitHub
parent 70764cff50
commit 6bd19e15ad
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 187 additions and 31 deletions

View File

@ -13,13 +13,13 @@ jobs:
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Start Ollama Server
run: |
curl -fsSL https://ollama.com/install.sh | sh
ollama serve &
sleep 10 # wait for server
ollama pull dolphin2.2-mistral:7b-q6_K
ollama pull dolphin2.2-mistral:7b-q6_K
ollama pull mxbai-embed-large
- name: "Setup Python, Poetry and Dependencies"

View File

@ -23,13 +23,13 @@ You can also use MemGPT to deploy agents as a *service*. You can use a MemGPT se
<img width="1000" alt="image" src="https://github.com/cpacker/MemGPT/assets/8505980/1096eb91-139a-4bc5-b908-fa585462da09">
## Installation & Setup
## Installation & Setup
Install MemGPT:
```sh
pip install -U pymemgpt
```
To use MemGPT with OpenAI, set the environment variable `OPENAI_API_KEY` to your OpenAI key then run:
To use MemGPT with OpenAI, set the environment variable `OPENAI_API_KEY` to your OpenAI key then run:
```
memgpt quickstart --backend openai
```
@ -54,20 +54,20 @@ MemGPT provides a developer portal that enables you to easily create, edit, moni
<img width="1000" alt="image" src="https://github.com/cpacker/MemGPT/assets/5475622/071117c5-46a7-4953-bc9d-d74880e66258">
## Quickstart (Server)
## Quickstart (Server)
**Option 1 (Recommended)**: Run with docker compose
**Option 1 (Recommended)**: Run with docker compose
1. [Install docker on your system](https://docs.docker.com/get-docker/)
2. Clone the repo: `git clone https://github.com/cpacker/MemGPT.git`
3. Copy-paste `.env.example` to `.env` and optionally modify
4. Run `docker compose up`
5. Go to `memgpt.localhost` in the browser to view the developer portal
5. Go to `memgpt.localhost` in the browser to view the developer portal
**Option 2:** Run with the CLI:
1. Run `memgpt server`
2. Go to `localhost:8283` in the browser to view the developer portal
Once the server is running, you can use the [Python client](https://memgpt.readme.io/docs/admin-client) or [REST API](https://memgpt.readme.io/reference/api) to connect to `memgpt.localhost` (if you're running with docker compose) or `localhost:8283` (if you're running with the CLI) to create users, agents, and more. The service requires authentication with a MemGPT admin password; it is the value of `MEMGPT_SERVER_PASS` in `.env`.
Once the server is running, you can use the [Python client](https://memgpt.readme.io/docs/admin-client) or [REST API](https://memgpt.readme.io/reference/api) to connect to `memgpt.localhost` (if you're running with docker compose) or `localhost:8283` (if you're running with the CLI) to create users, agents, and more. The service requires authentication with a MemGPT admin password; it is the value of `MEMGPT_SERVER_PASS` in `.env`.
## Supported Endpoints & Backends
MemGPT is designed to be model and provider agnostic. The following LLM and embedding endpoints are supported:
@ -96,7 +96,7 @@ When using MemGPT with open LLMs (such as those downloaded from HuggingFace), th
* **Report Issues or Suggest Features**: Have an issue or a feature request? Please submit them through our [GitHub Issues page](https://github.com/cpacker/MemGPT/issues).
* **Explore the Roadmap**: Curious about future developments? View and comment on our [project roadmap](https://github.com/cpacker/MemGPT/issues/1200).
* **Benchmark the Performance**: Want to benchmark the performance of a model on MemGPT? Follow our [Benchmarking Guidance](#benchmarking-guidance).
* **Join Community Events**: Stay updated with the [MemGPT event calendar](https://lu.ma/berkeley-llm-meetup) or follow our [Twitter account](https://twitter.com/MemGPT).
* **Join Community Events**: Stay updated with the [MemGPT event calendar](https://lu.ma/berkeley-llm-meetup) or follow our [Twitter account](https://twitter.com/MemGPT).
## Benchmarking Guidance
@ -104,4 +104,3 @@ To evaluate the performance of a model on MemGPT, simply configure the appropria
## Legal notices
By using MemGPT and related MemGPT services (such as the MemGPT endpoint or hosted service), you agree to our [privacy policy](https://github.com/cpacker/MemGPT/tree/main/PRIVACY.md) and [terms of service](https://github.com/cpacker/MemGPT/tree/main/TERMS.md).

View File

@ -90,4 +90,3 @@ memgpt run --preset resend_preset --persona sam_pov --human cs_phd --stream
Waiting in our inbox:
<img width="500" alt="image" src="https://github.com/cpacker/MemGPT/assets/5475622/95f9b24a-98c3-493a-a787-72a2a956641a">

View File

@ -1101,9 +1101,12 @@ class ListChoice(str, Enum):
@app.command()
def list(arg: Annotated[ListChoice, typer.Argument]):
from memgpt.client.client import create_client
config = MemGPTConfig.load()
ms = MetadataStore(config)
user_id = uuid.UUID(config.anon_clientid)
client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_SERVER_PASS"))
table = ColorTable(theme=Themes.OCEAN)
if arg == ListChoice.agents:
"""List all agents"""
@ -1130,7 +1133,7 @@ def list(arg: Annotated[ListChoice, typer.Argument]):
elif arg == ListChoice.humans:
"""List all humans"""
table.field_names = ["Name", "Text"]
for human in ms.list_humans(user_id=user_id):
for human in client.list_humans(user_id=user_id):
table.add_row([human.name, human.text.replace("\n", "")[:100]])
print(table)
elif arg == ListChoice.personas:
@ -1194,9 +1197,12 @@ def add(
filename: Annotated[Optional[str], typer.Option("-f", help="Specify filename")] = None,
):
"""Add a person/human"""
from memgpt.client.client import create_client
config = MemGPTConfig.load()
user_id = uuid.UUID(config.anon_clientid)
ms = MetadataStore(config)
client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_SERVER_PASS"))
if filename: # read from file
assert text is None, "Cannot specify both text and filename"
with open(filename, "r", encoding="utf-8") as f:
@ -1214,16 +1220,16 @@ def add(
ms.add_persona(persona)
elif option == "human":
human = ms.get_human(name=name, user_id=user_id)
human = client.get_human(name=name, user_id=user_id)
if human:
# confirm if user wants to overwrite
if not questionary.confirm(f"Human {name} already exists. Overwrite?").ask():
return
human.text = text
ms.update_human(human)
client.update_human(human)
else:
human = HumanModel(name=name, text=text, user_id=user_id)
ms.add_human(HumanModel(name=name, text=text, user_id=user_id))
client.add_human(HumanModel(name=name, text=text, user_id=user_id))
elif option == "preset":
assert filename, "Must specify filename for preset"
create_preset_from_file(filename, name, user_id, ms)
@ -1234,9 +1240,11 @@ def add(
@app.command()
def delete(option: str, name: str):
"""Delete a source from the archival memory."""
from memgpt.client.client import create_client
config = MemGPTConfig.load()
user_id = uuid.UUID(config.anon_clientid)
client = create_client(base_url=os.getenv("MEMGPT_BASE_URL"), token=os.getenv("MEMGPT_API_KEY"))
ms = MetadataStore(config)
assert ms.get_user(user_id=user_id), f"User {user_id} does not exist"
@ -1273,9 +1281,9 @@ def delete(option: str, name: str):
ms.delete_agent(agent_id=agent.id)
elif option == "human":
human = ms.get_human(name=name, user_id=user_id)
human = client.get_human(name=name, user_id=user_id)
assert human is not None, f"Human {name} does not exist"
ms.delete_human(name=name, user_id=user_id)
client.delete_human(name=name, user_id=user_id)
elif option == "persona":
persona = ms.get_persona(name=name, user_id=user_id)
assert persona is not None, f"Persona {name} does not exist"

View File

@ -630,6 +630,8 @@ class LocalClient(AbstractClient):
self.interface = QueuingInterface(debug=debug)
self.server = SyncServer(default_interface=self.interface)
# agents
def list_agents(self):
    """Return all agents belonging to this client's user."""
    self.interface.clear()
    agents = self.server.list_agents(user_id=self.user_id)
    return agents
@ -665,6 +667,14 @@ class LocalClient(AbstractClient):
)
return agent_state
def delete_agent(self, agent_id: uuid.UUID):
    """Permanently remove the agent identified by ``agent_id``."""
    self.server.delete_agent(agent_id=agent_id, user_id=self.user_id)
def get_agent_config(self, agent_id: str) -> AgentState:
    """Fetch the stored configuration/state for a single agent."""
    self.interface.clear()
    state = self.server.get_agent_config(user_id=self.user_id, agent_id=agent_id)
    return state
# presets
def create_preset(self, preset: Preset) -> Preset:
if preset.user_id is None:
preset.user_id = self.user_id
@ -677,9 +687,7 @@ class LocalClient(AbstractClient):
def list_presets(self) -> List[PresetModel]:
    """Return every preset registered for this client's user."""
    presets = self.server.list_presets(user_id=self.user_id)
    return presets
# NOTE(review): duplicate of get_agent_config defined earlier in this class —
# in Python the later definition silently wins at class-creation time. This
# appears to be the copy being removed by this change; confirm against the
# full file.
def get_agent_config(self, agent_id: str) -> AgentState:
    self.interface.clear()
    return self.server.get_agent_config(user_id=self.user_id, agent_id=agent_id)
# memory
def get_agent_memory(self, agent_id: str) -> Dict:
self.interface.clear()
@ -689,6 +697,8 @@ class LocalClient(AbstractClient):
self.interface.clear()
return self.server.update_agent_core_memory(user_id=self.user_id, agent_id=agent_id, new_memory_contents=new_memory_contents)
# agent interactions
def user_message(self, agent_id: str, message: str) -> Union[List[Dict], Tuple[List[Dict], int]]:
self.interface.clear()
self.server.user_message(user_id=self.user_id, agent_id=agent_id, message=message)
@ -704,17 +714,7 @@ class LocalClient(AbstractClient):
def save(self):
    """Persist all in-memory agent state via the server."""
    self.server.save_agents()
def load_data(self, connector: DataConnector, source_name: str):
    """Ingest data from ``connector`` into the named data source."""
    self.server.load_data(
        user_id=self.user_id,
        connector=connector,
        source_name=source_name,
    )
def create_source(self, name: str):
    """Create a new (empty) data source owned by this client's user."""
    self.server.create_source(name=name, user_id=self.user_id)
def attach_source_to_agent(self, source_id: uuid.UUID, agent_id: uuid.UUID):
    """Attach an existing data source to the given agent."""
    self.server.attach_source_to_agent(
        user_id=self.user_id, source_id=source_id, agent_id=agent_id
    )
# NOTE(review): delete_agent is also defined earlier in this class; this copy
# appears to be the one removed as methods are regrouped by category in this
# change (the later definition is what Python keeps).
def delete_agent(self, agent_id: uuid.UUID):
    self.server.delete_agent(user_id=self.user_id, agent_id=agent_id)
# archival memory
def get_agent_archival_memory(
self, agent_id: uuid.UUID, before: Optional[uuid.UUID] = None, after: Optional[uuid.UUID] = None, limit: Optional[int] = 1000
@ -727,3 +727,35 @@ class LocalClient(AbstractClient):
limit=limit,
)
return archival_json_records
# messages
# humans / personas
def list_humans(self, user_id: Optional[uuid.UUID] = None):
    """List all human profiles.

    Args:
        user_id: Owner whose humans to list. The body already fell back to
            this client's own ``self.user_id`` when falsy, so requiring the
            argument was misleading — it now defaults to ``None``
            (backward compatible; explicit callers are unaffected).
    """
    return self.server.list_humans(user_id=user_id if user_id else self.user_id)
def get_human(self, name: str, user_id: Optional[uuid.UUID] = None):
    """Fetch one human profile by name.

    Args:
        name: Name of the human profile to look up.
        user_id: Owner of the profile; defaults to this client's own user
            for consistency with ``list_humans`` (explicit callers are
            unaffected).
    """
    return self.server.get_human(name=name, user_id=user_id if user_id else self.user_id)
def add_human(self, human: HumanModel):
    """Persist a new human profile."""
    created = self.server.add_human(human=human)
    return created
def update_human(self, human: HumanModel):
    """Overwrite an existing human profile with the given model."""
    updated = self.server.update_human(human=human)
    return updated
def delete_human(self, name: str, user_id: Optional[uuid.UUID] = None):
    """Delete the named human profile.

    Args:
        name: Name of the human profile to delete.
        user_id: Owner of the profile; defaults to this client's own user,
            matching ``list_humans``. The server call now uses keyword
            arguments (the server-side signature is ``delete_human(name,
            user_id)``) to guard against positional-order drift.
    """
    return self.server.delete_human(name=name, user_id=user_id if user_id else self.user_id)
# tools
# data sources
# Ingest data from `connector` into the named source. NOTE(review): identical
# to the load_data definition earlier in this class — this diff appears to
# move it under the "data sources" grouping.
def load_data(self, connector: DataConnector, source_name: str):
    self.server.load_data(user_id=self.user_id, connector=connector, source_name=source_name)
# Create a new data source owned by the current user (duplicate of the
# earlier definition; this copy is the relocated one from the diff).
def create_source(self, name: str):
    self.server.create_source(user_id=self.user_id, name=name)
# Attach an existing source to an agent (duplicate of the earlier definition;
# the later one takes effect at class-creation time).
def attach_source_to_agent(self, source_id: uuid.UUID, agent_id: uuid.UUID):
    self.server.attach_source_to_agent(user_id=self.user_id, source_id=source_id, agent_id=agent_id)

View File

@ -40,6 +40,7 @@ from memgpt.interface import CLIInterface # for printing to terminal
from memgpt.metadata import MetadataStore
from memgpt.models.pydantic_models import (
DocumentModel,
HumanModel,
PassageModel,
PresetModel,
SourceModel,
@ -896,6 +897,21 @@ class SyncServer(LockingServer):
"agents": agents_states_dicts,
}
def list_humans(self, user_id: uuid.UUID):
    """Return all human profiles stored for ``user_id``."""
    records = self.ms.list_humans(user_id=user_id)
    return records
def get_human(self, name: str, user_id: uuid.UUID):
    """Look up a single human profile by name for the given user."""
    record = self.ms.get_human(name=name, user_id=user_id)
    return record
def add_human(self, human: HumanModel):
    """Persist a new human profile in the metadata store."""
    result = self.ms.add_human(human=human)
    return result
def update_human(self, human: HumanModel):
    """Overwrite an existing human profile in the metadata store."""
    result = self.ms.update_human(human=human)
    return result
def delete_human(self, name: str, user_id: uuid.UUID):
    """Remove the named human profile for the given user."""
    # Positional call preserved: the metadata store's parameter names are
    # not visible from here, so keywords could break the call.
    result = self.ms.delete_human(name, user_id)
    return result
def get_agent(self, user_id: uuid.UUID, agent_id: uuid.UUID):
    """Fetch the persisted agent state for (user_id, agent_id)."""
    state = self.ms.get_agent(agent_id=agent_id, user_id=user_id)
    return state

102
tests/test_new_cli.py Normal file
View File

@ -0,0 +1,102 @@
import os
import random
import string
import unittest.mock
import pytest
from memgpt.cli.cli_config import add, delete, list
def generate_random_string(length):
    """Return a random string of ``length`` ASCII letters and digits.

    Plain helper, not a test: pytest only collects ``test_*`` functions, so
    the former ``@pytest.mark.skip(reason="This is a helper function.")``
    decorator had no effect and was removed.
    """
    alphabet = string.ascii_letters + string.digits
    return "".join(random.choices(alphabet, k=length))
def unset_env_variables():
    """Pop the MemGPT server env vars and return their previous values.

    Clearing ``MEMGPT_BASE_URL`` / ``MEMGPT_SERVER_PASS`` makes the CLI fall
    back to a local client during the test; pair with ``reset_env_variables``
    to restore them afterwards. Not a test itself: pytest never collects it,
    so the old ``@pytest.mark.skip`` decorator was a no-op and was removed.

    Returns:
        tuple: ``(server_url, token)``, each ``None`` if the variable was unset.
    """
    server_url = os.environ.pop("MEMGPT_BASE_URL", None)
    token = os.environ.pop("MEMGPT_SERVER_PASS", None)
    return server_url, token
def reset_env_variables(server_url, token):
    """Restore env vars previously captured by ``unset_env_variables``.

    Args:
        server_url: Previous ``MEMGPT_BASE_URL`` value, or ``None`` to leave
            the variable unset.
        token: Previous ``MEMGPT_SERVER_PASS`` value, or ``None`` to leave
            the variable unset.

    Helper, not a test — the former ``@pytest.mark.skip`` decorator did
    nothing (pytest only collects ``test_*`` names) and was removed.
    """
    if server_url is not None:
        os.environ["MEMGPT_BASE_URL"] = server_url
    if token is not None:
        os.environ["MEMGPT_SERVER_PASS"] = token
def test_crud_human(capsys):
    """End-to-end CLI test: add, list, update, and delete human profiles."""
    # Clear the server env vars so the CLI uses a local client; restored at the end.
    server_url, token = unset_env_variables()

    # Initialize values that won't interfere with existing ones
    human_1 = generate_random_string(16)
    text_1 = generate_random_string(32)
    human_2 = generate_random_string(16)
    text_2 = generate_random_string(32)
    text_3 = generate_random_string(32)

    # Add initial human
    add("human", human_1, text_1)

    # Expect initial human to be listed
    list("humans")
    captured = capsys.readouterr()
    # Slice from the first occurrence of human_1 so table rows that existed
    # before this test are ignored by the assertions below.
    output = captured.out[captured.out.find(human_1) :]

    assert human_1 in output
    assert text_1 in output

    # Add second human
    add("human", human_2, text_2)

    # Expect to see second human
    list("humans")
    captured = capsys.readouterr()
    output = captured.out[captured.out.find(human_1) :]
    assert human_1 in output
    assert text_1 in output
    assert human_2 in output
    assert text_2 in output

    with unittest.mock.patch("questionary.confirm") as mock_confirm:
        # Auto-answer "yes" to the overwrite prompt shown for an existing name.
        mock_confirm.return_value.ask.return_value = True
        # Update second human
        add("human", human_2, text_3)

    # Expect to see updated text (and no duplicate row for human_2)
    list("humans")
    captured = capsys.readouterr()
    output = captured.out[captured.out.find(human_1) :]
    assert human_1 in output
    assert text_1 in output
    assert human_2 in output
    assert output.count(human_2) == 1
    assert text_3 in output
    assert text_2 not in output

    # Delete second human
    delete("human", human_2)

    # Expect second human to be deleted
    list("humans")
    captured = capsys.readouterr()
    output = captured.out[captured.out.find(human_1) :]
    assert human_1 in output
    assert text_1 in output
    assert human_2 not in output
    assert text_2 not in output

    # Clean up
    delete("human", human_1)
    reset_env_variables(server_url, token)