Commit

cleanup

cpacker committed Oct 25, 2023
1 parent 3b36bf4 commit a7e06d0
Showing 1 changed file with 9 additions and 7 deletions.
16 changes: 9 additions & 7 deletions memgpt/local_llm/chat_completion_proxy.py
@@ -5,12 +5,14 @@
 import json
 
 from .webui.api import get_webui_completion
-from .llm_chat_completion_wrappers import airoboros
+from .llm_chat_completion_wrappers import airoboros, dolphin
 from .utils import DotDict
 
 HOST = os.getenv("OPENAI_API_BASE")
 HOST_TYPE = os.getenv("BACKEND_TYPE")  # default None == ChatCompletion
 DEBUG = False
+# DEBUG = True
+DEFAULT_WRAPPER = airoboros.Airoboros21InnerMonologueWrapper()
 
 
 async def get_chat_completion(
@@ -22,14 +24,14 @@ async def get_chat_completion(
     if function_call != "auto":
         raise ValueError(f"function_call == {function_call} not supported (auto only)")
 
-    if model == "airoboros_v2.1":
-        llm_wrapper = airoboros.Airoboros21Wrapper()
+    if model == "airoboros-l2-70b-2.1":
+        llm_wrapper = airoboros.Airoboros21InnerMonologueWrapper()
+    elif model == "dolphin-2.1-mistral-7b":
+        llm_wrapper = dolphin.Dolphin21MistralWrapper()
     else:
         # Warn the user that we're using the fallback
-        print(
-            f"Warning: could not find an LLM wrapper for {model}, using the airoboros wrapper"
-        )
-        llm_wrapper = airoboros.Airoboros21Wrapper()
+        print(f"Warning: no wrapper specified for local LLM, using the default wrapper")
+        llm_wrapper = DEFAULT_WRAPPER
 
     # First step: turn the message sequence into a prompt that the model expects
     prompt = llm_wrapper.chat_completion_to_prompt(messages, functions)
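
Net effect of the commit: the model-to-wrapper dispatch now keys on the published model names (airoboros-l2-70b-2.1, dolphin-2.1-mistral-7b), and the fallback path reuses a single module-level DEFAULT_WRAPPER instead of constructing a fresh Airoboros21Wrapper inline. Below is a minimal, self-contained sketch of that selection pattern; the select_wrapper helper and the stub wrapper classes are hypothetical stand-ins for the real classes in memgpt/local_llm/llm_chat_completion_wrappers/.

class Airoboros21InnerMonologueWrapper:
    # Hypothetical stand-in for the real airoboros wrapper class.
    def chat_completion_to_prompt(self, messages, functions):
        return "<airoboros-formatted prompt>"


class Dolphin21MistralWrapper:
    # Hypothetical stand-in for the real dolphin wrapper class.
    def chat_completion_to_prompt(self, messages, functions):
        return "<dolphin-formatted prompt>"


# Built once at import time, so the fallback is reused across calls
# rather than re-instantiated on every request.
DEFAULT_WRAPPER = Airoboros21InnerMonologueWrapper()


def select_wrapper(model):
    # Exact-match dispatch on the model name, mirroring the
    # if/elif/else chain in get_chat_completion.
    if model == "airoboros-l2-70b-2.1":
        return Airoboros21InnerMonologueWrapper()
    elif model == "dolphin-2.1-mistral-7b":
        return Dolphin21MistralWrapper()
    print("Warning: no wrapper specified for local LLM, using the default wrapper")
    return DEFAULT_WRAPPER


wrapper = select_wrapper("dolphin-2.1-mistral-7b")
print(wrapper.chat_completion_to_prompt([{"role": "user", "content": "hi"}], functions=[]))

One trade-off of exact string matching: a new model name (or a quantized variant with a different suffix) silently falls through to the default wrapper, which is presumably why the warning print was kept on the fallback path.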
