MemGPT/letta/server/rest_api/routers/v1/llms.py
Sarah Wooders d631cec3d6
feat: require LLMConfig and EmbeddingConfig to be specified for agent creation + allow multiple simultaneous provider configs for server (#1814)
Co-authored-by: Shubham Naik <shubham.naik10@gmail.com>
Co-authored-by: Matthew Zhou <mattzh1314@gmail.com>
Co-authored-by: Matt Zhou <mattzhou@Matts-MacBook-Pro.local>
Co-authored-by: Shubham Naik <shub@memgpt.ai>
2024-10-04 19:35:00 -07:00


from typing import TYPE_CHECKING, List

from fastapi import APIRouter, Depends

from letta.schemas.embedding_config import EmbeddingConfig
from letta.schemas.llm_config import LLMConfig
from letta.server.rest_api.utils import get_letta_server

if TYPE_CHECKING:
    from letta.server.server import SyncServer

router = APIRouter(prefix="/models", tags=["models", "llms"])


@router.get("/", response_model=List[LLMConfig], operation_id="list_models")
def list_llm_backends(
    server: "SyncServer" = Depends(get_letta_server),
):
    """List the LLM configurations available across the server's configured providers."""
    models = server.list_llm_models()
    print(models)  # debug: echo the returned configs to the server log
    return models


@router.get("/embedding", response_model=List[EmbeddingConfig], operation_id="list_embedding_models")
def list_embedding_backends(
    server: "SyncServer" = Depends(get_letta_server),
):
    """List the embedding configurations available across the server's configured providers."""
    models = server.list_embedding_models()
    print(models)  # debug: echo the returned configs to the server log
    return models
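
Below is a minimal client-side sketch of how these two endpoints might be exercised. The base URL (a local Letta server on port 8283 with a /v1 API prefix), the use of the requests library, and the printed field names (taken from the LLMConfig and EmbeddingConfig schemas) are illustrative assumptions, not part of this module.

# Illustrative usage sketch; assumes a running local Letta server and `requests` installed.
import requests

BASE_URL = "http://localhost:8283/v1"  # assumed local server address and API prefix

# GET /v1/models/ -> list of LLMConfig objects, one per configured provider model.
llm_configs = requests.get(f"{BASE_URL}/models/").json()
for cfg in llm_configs:
    print(cfg.get("model"), cfg.get("model_endpoint_type"))

# GET /v1/models/embedding -> list of EmbeddingConfig objects.
embedding_configs = requests.get(f"{BASE_URL}/models/embedding").json()
for cfg in embedding_configs:
    print(cfg.get("embedding_model"), cfg.get("embedding_endpoint_type"))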