From 15e7b646a83d13c49428a0237f2204494cb0cf16 Mon Sep 17 00:00:00 2001
From: Robin Goetz <35136007+goetzrobin@users.noreply.github.com>
Date: Thu, 15 Aug 2024 18:29:52 +0200
Subject: [PATCH] fix: add correct dependencies and missing variable (#1638)

---
 memgpt/llm_api/openai.py                 | 2 +-
 memgpt/server/rest_api/agents/message.py | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/memgpt/llm_api/openai.py b/memgpt/llm_api/openai.py
index e7001d63e4..653d3f14a2 100644
--- a/memgpt/llm_api/openai.py
+++ b/memgpt/llm_api/openai.py
@@ -252,7 +252,7 @@ def _sse_post(url: str, data: dict, headers: dict) -> Generator[ChatCompletionCh
         # Inspect for errors before iterating (see https://github.com/florimondmanca/httpx-sse/pull/12)
         if not event_source.response.is_success:
             # handle errors
-            from utils import printd
+            from memgpt.utils import printd

             printd("Caught error before iterating SSE request:", vars(event_source.response))
             printd(event_source.response.read())
diff --git a/memgpt/server/rest_api/agents/message.py b/memgpt/server/rest_api/agents/message.py
index 5568e7705a..ab5e41b1ef 100644
--- a/memgpt/server/rest_api/agents/message.py
+++ b/memgpt/server/rest_api/agents/message.py
@@ -95,6 +95,7 @@ async def send_message_to_agent(
 ) -> Union[StreamingResponse, UserMessageResponse]:
     """Split off into a separate function so that it can be imported in the /chat/completion proxy."""

+    include_final_message = True
     # handle the legacy mode streaming
     if stream_legacy:
         # NOTE: override