{ "cells": [ { "cell_type": "markdown", "id": "591be0c0-7332-4c57-adcf-fecc578eeb67", "metadata": {}, "source": [ "\n", " \"Open\n", "" ] }, { "cell_type": "code", "execution_count": null, "id": "43d71a67-3a01-4543-99ad-7dce12d793da", "metadata": {}, "outputs": [], "source": [ "%pip install pyautogen" ] }, { "cell_type": "code", "execution_count": null, "id": "b3754942-819b-4df9-be3f-6cfb3ca101dc", "metadata": {}, "outputs": [], "source": [ "%pip install pymemgpt" ] }, { "cell_type": "code", "execution_count": null, "id": "bd6df0ac-66a6-4dc7-9262-4c2ad05fab91", "metadata": {}, "outputs": [], "source": [ "import openai\n", "\n", "openai.api_key = \"YOUR_API_KEY\"" ] }, { "cell_type": "code", "execution_count": null, "id": "0cb9b18c-3662-4206-9ff5-de51a3aafb36", "metadata": {}, "outputs": [], "source": [ "\"\"\"Example of how to add MemGPT into an AutoGen groupchat\n", "\n", "Based on the official AutoGen example here: https://github.com/microsoft/autogen/blob/main/notebook/agentchat_groupchat.ipynb\n", "\n", "Begin by doing:\n", " pip install \"pyautogen[teachable]\"\n", " pip install pymemgpt\n", " or\n", " pip install -e . (inside the MemGPT home directory)\n", "\"\"\"\n", "\n", "import os\n", "import autogen\n", "from memgpt.autogen.memgpt_agent import create_autogen_memgpt_agent\n", "\n", "config_list = [\n", " {\n", " \"model\": \"gpt-4\",\n", " \"api_key\": os.getenv(\"OPENAI_API_KEY\"),\n", " },\n", "]\n", "\n", "# If USE_MEMGPT is False, then this example will be the same as the official AutoGen repo (https://github.com/microsoft/autogen/blob/main/notebook/agentchat_groupchat.ipynb)\n", "# If USE_MEMGPT is True, then we swap out the \"coder\" agent with a MemGPT agent\n", "USE_MEMGPT = True\n", "# If DEBUG is False, a lot of MemGPT's inner workings output is suppressed and only the final send_message is displayed.\n", "# If DEBUG is True, then all of MemGPT's inner workings (function calls, etc.) will be output.\n", "DEBUG = False\n", "\n", "llm_config = {\"config_list\": config_list, \"seed\": 42}\n", "\n", "# The user agent\n", "user_proxy = autogen.UserProxyAgent(\n", " name=\"User_proxy\",\n", " system_message=\"A human admin.\",\n", " code_execution_config={\"last_n_messages\": 2, \"work_dir\": \"groupchat\"},\n", " human_input_mode=\"TERMINATE\", # needed?\n", ")\n", "\n", "# The agent playing the role of the product manager (PM)\n", "pm = autogen.AssistantAgent(\n", " name=\"Product_manager\",\n", " system_message=\"Creative in software product ideas.\",\n", " llm_config=llm_config,\n", ")\n", "\n", "if not USE_MEMGPT:\n", " # In the AutoGen example, we create an AssistantAgent to play the role of the coder\n", " coder = autogen.AssistantAgent(\n", " name=\"Coder\",\n", " llm_config=llm_config,\n", " )\n", "\n", "else:\n", " # In our example, we swap this AutoGen agent with a MemGPT agent\n", " # This MemGPT agent will have all the benefits of MemGPT, ie persistent memory, etc.\n", " coder = create_autogen_memgpt_agent(\n", " \"MemGPT_coder\",\n", " persona_description=\"I am a 10x engineer, trained in Python. I was the first engineer at Uber (which I make sure to tell everyone I work with).\",\n", " user_description=f\"You are participating in a group chat with a user ({user_proxy.name}) and a product manager ({pm.name}).\",\n", " interface_kwargs={\"debug\": DEBUG},\n", " )\n", "\n", "# Initialize the group chat between the user and two LLM agents (PM and coder)\n", "groupchat = autogen.GroupChat(agents=[user_proxy, pm, coder], messages=[], max_round=12)\n", "manager = autogen.GroupChatManager(groupchat=groupchat, llm_config=llm_config)\n", "\n", "# Begin the group chat with a message from the user\n", "user_proxy.initiate_chat(\n", " manager,\n", " message=\"I want to design an app to make me one million dollars in one month. Yes, your heard that right.\",\n", ")" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.6" } }, "nbformat": 4, "nbformat_minor": 5 }