diff --git a/letta/embeddings.py b/letta/embeddings.py
index 78ee9e090..f03cbf5fb 100644
--- a/letta/embeddings.py
+++ b/letta/embeddings.py
@@ -91,6 +91,9 @@ class EmbeddingEndpoint:
             raise ValueError(
                 f"Embeddings endpoint was provided an invalid URL (set to: '{base_url}'). Make sure embedding_endpoint is set correctly in your Letta config."
             )
+        # TODO: find a neater solution - re-mapping for letta endpoint
+        if model == "letta-free":
+            model = "BAAI/bge-large-en-v1.5"
         self.model_name = model
         self._user = user
         self._base_url = base_url
diff --git a/letta/llm_api/openai.py b/letta/llm_api/openai.py
index 0e8cda993..69a50fc26 100644
--- a/letta/llm_api/openai.py
+++ b/letta/llm_api/openai.py
@@ -145,6 +145,7 @@ def build_openai_chat_completions_request(
         import uuid

         data.user = str(uuid.UUID(int=0))
+        data.model = "memgpt-openai"

     return data
diff --git a/letta/providers.py b/letta/providers.py
index 6d8c753ab..c524d9394 100644
--- a/letta/providers.py
+++ b/letta/providers.py
@@ -31,7 +31,7 @@ class LettaProvider(Provider):
     def list_llm_models(self) -> List[LLMConfig]:
         return [
             LLMConfig(
-                model="memgpt-openai",
+                model="letta-free",  # NOTE: renamed
                 model_endpoint_type="openai",
                 model_endpoint="https://inference.memgpt.ai",
                 context_window=16384,
@@ -41,7 +41,7 @@ class LettaProvider(Provider):
     def list_embedding_models(self):
         return [
             EmbeddingConfig(
-                embedding_model="BAAI/bge-large-en-v1.5",
+                embedding_model="letta-free",  # NOTE: renamed
                 embedding_endpoint_type="hugging-face",
                 embedding_endpoint="https://embeddings.memgpt.ai",
                 embedding_dim=1024,