diff --git a/letta/server/rest_api/app.py b/letta/server/rest_api/app.py
index dd9a51a08e..a0daff26b2 100644
--- a/letta/server/rest_api/app.py
+++ b/letta/server/rest_api/app.py
@@ -9,6 +9,7 @@
 from starlette.middleware.cors import CORSMiddleware
 
 from letta.constants import ADMIN_PREFIX, API_PREFIX, OPENAI_API_PREFIX
+from letta.schemas.letta_response import LettaResponse
 from letta.server.constants import REST_DEFAULT_PORT
 
 # NOTE(charles): these are extra routes that are not part of v1 but we still need to mount to pass tests
@@ -128,6 +129,9 @@ def on_startup():
         openai_docs["info"]["title"] = "OpenAI Assistants API"
         letta_docs["paths"] = {k: v for k, v in letta_docs["paths"].items() if not k.startswith("/openai")}
         letta_docs["info"]["title"] = "Letta API"
+        letta_docs["components"]["schemas"]["LettaResponse"] = {
+            "properties": LettaResponse.model_json_schema(ref_template="#/components/schemas/LettaResponse/properties/{model}")["$defs"]
+        }
 
         # Split the API docs into Letta API, and OpenAI Assistants compatible API
         for name, docs in [
diff --git a/letta/server/rest_api/routers/v1/agents.py b/letta/server/rest_api/routers/v1/agents.py
index b0047cb61d..a30674c1d4 100644
--- a/letta/server/rest_api/routers/v1/agents.py
+++ b/letta/server/rest_api/routers/v1/agents.py
@@ -366,7 +366,7 @@ def update_message(
         200: {
             "description": "Successful response",
             "content": {
-                "application/json": {"schema": LettaResponse.model_json_schema()},  # Use model_json_schema() instead of model directly
+                "application/json": {"schema": {"$ref": "#/components/schemas/LettaResponse"}},  # reference the shared LettaResponse component registered at startup
                 "text/event-stream": {"description": "Server-Sent Events stream"},
             },
         },