diff --git a/app/constant.ts b/app/constant.ts
index 59a8c1b5048a..b91b931c685a 100644
--- a/app/constant.ts
+++ b/app/constant.ts
@@ -54,3 +54,8 @@ export const OpenaiPath = {
 };
 
 export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+export const DEFAULT_SYSTEM_TEMPLATE = `
+You are ChatGPT, a large language model trained by OpenAI.
+Knowledge cutoff: 2021-09
+Current model: {{model}}
+Current time: {{time}}`;
diff --git a/app/store/chat.ts b/app/store/chat.ts
index fa6296811bfe..4c466a295c56 100644
--- a/app/store/chat.ts
+++ b/app/store/chat.ts
@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
 import { showToast } from "../components/ui-lib";
 import { ModelConfig, ModelType, useAppConfig } from "./config";
 import { createEmptyMask, Mask } from "./mask";
-import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
+import {
+  DEFAULT_INPUT_TEMPLATE,
+  DEFAULT_SYSTEM_TEMPLATE,
+  StoreKey,
+} from "../constant";
 import { api, RequestMessage } from "../client/api";
 import { ChatControllerPool } from "../client/controller";
 import { prettyObject } from "../utils/format";
@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
       const modelConfig = session.mask.modelConfig;
 
       const userContent = fillTemplateWith(content, modelConfig);
-      console.log("[User Input] fill with template: ", userContent);
+      console.log("[User Input] after template: ", userContent);
 
       const userMessage: ChatMessage = createMessage({
         role: "user",
@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
       });
 
       // make request
-      console.log("[User Input] ", sendMessages);
       api.llm.chat({
         messages: sendMessages,
         config: { ...modelConfig, stream: true },
@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
       // in-context prompts
       const contextPrompts = session.mask.context.slice();
 
+      // system prompts, to get close to OpenAI Web ChatGPT
+      // only will be injected if user does not use a mask or set none context prompts
+      const shouldInjectSystemPrompts = contextPrompts.length === 0;
+      const systemPrompts = shouldInjectSystemPrompts
+        ? [
+            createMessage({
+              role: "system",
+              content: fillTemplateWith("", {
+                ...modelConfig,
+                template: DEFAULT_SYSTEM_TEMPLATE,
+              }),
+            }),
+          ]
+        : [];
+      if (shouldInjectSystemPrompts) {
+        console.log(
+          "[Global System Prompt] ",
+          systemPrompts.at(0)?.content ?? "empty",
+        );
+      }
+
       // long term memory
       const shouldSendLongTermMemory =
         modelConfig.sendMemory &&
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
       );
 
       // lets concat send messages, including 4 parts:
+      // 0. system prompt: to get close to OpenAI Web ChatGPT
       // 1. long term memory: summarized memory messages
       // 2. pre-defined in-context prompts
       // 3. short term memory: latest n messages
@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(
 
       // concat all messages
       const recentMessages = [
+        ...systemPrompts,
        ...longTermMemoryPrompts,
        ...contextPrompts,
        ...reversedRecentMessages.reverse(),
diff --git a/app/store/config.ts b/app/store/config.ts
index b15fa9148021..945e1be7c45d 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
     }),
     {
       name: StoreKey.Config,
-      version: 3.1,
+      version: 3.2,
       migrate(persistedState, version) {
-        if (version === 3.1) return persistedState as any;
+        if (version === 3.2) return persistedState as any;
 
         const state = persistedState as ChatConfig;
         state.modelConfig.sendMemory = true;
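
For reference, here is a minimal sketch of how the injected system prompt is expected to render once the template placeholders are filled. The `fillTemplateWith` below is a simplified stand-in written only for illustration (the repository's actual helper in `app/store/chat.ts` is not shown in this diff); it assumes `{{model}}` and `{{time}}` are plain string substitutions.

```ts
// Illustrative sketch only: a simplified stand-in for the template fill that
// DEFAULT_SYSTEM_TEMPLATE relies on; not the repository's real implementation.
const DEFAULT_SYSTEM_TEMPLATE = `
You are ChatGPT, a large language model trained by OpenAI.
Knowledge cutoff: 2021-09
Current model: {{model}}
Current time: {{time}}`;

interface TemplateVars {
  model: string;
  template?: string;
}

// Replace {{model}}, {{time}}, and {{input}} placeholders with concrete values.
function fillTemplateWith(input: string, vars: TemplateVars): string {
  const values: Record<string, string> = {
    model: vars.model,
    time: new Date().toLocaleString(),
    input,
  };
  let output = vars.template ?? "{{input}}";
  for (const [name, value] of Object.entries(values)) {
    output = output.replaceAll(`{{${name}}}`, value);
  }
  return output;
}

// With an empty mask context, the injected system message would look like:
console.log(
  fillTemplateWith("", {
    model: "gpt-3.5-turbo",
    template: DEFAULT_SYSTEM_TEMPLATE,
  }),
);
```

Note the guard in the diff itself: the system prompt is only injected when `contextPrompts.length === 0`, so masks that define their own in-context prompts are left untouched.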