feat: #2144 improve input template #2145

Merged
merged 1 commit on Jun 26, 2023
5 changes: 5 additions & 0 deletions app/constant.ts
@@ -54,3 +54,8 @@ export const OpenaiPath = {
};

export const DEFAULT_INPUT_TEMPLATE = `{{input}}`; // input / time / model / lang
+export const DEFAULT_SYSTEM_TEMPLATE = `
+You are ChatGPT, a large language model trained by OpenAI.
+Knowledge cutoff: 2021-09
+Current model: {{model}}
+Current time: {{time}}`;
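
The templates above expose the input / time / model / lang variables noted in the comment, which are substituted at request time by fillTemplateWith in app/store/chat.ts. A minimal sketch of that substitution, using a hypothetical renderTemplate helper and example values rather than the project's actual implementation:

// Illustrative sketch only: minimal placeholder substitution for the template
// variables above. renderTemplate is a hypothetical helper, not the real
// fillTemplateWith from app/store/chat.ts.
function renderTemplate(template: string, vars: Record<string, string>): string {
  let output = template;
  for (const [name, value] of Object.entries(vars)) {
    output = output.replaceAll(`{{${name}}}`, value);
  }
  return output;
}

// Example: fills the {{model}} and {{time}} placeholders of a system template.
const systemPrompt = renderTemplate(
  "Current model: {{model}}\nCurrent time: {{time}}",
  { model: "gpt-3.5-turbo", time: new Date().toLocaleString() },
);
console.log(systemPrompt);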
32 changes: 29 additions & 3 deletions app/store/chat.ts
Original file line number Diff line number Diff line change
@@ -7,7 +7,11 @@ import Locale, { getLang } from "../locales";
import { showToast } from "../components/ui-lib";
import { ModelConfig, ModelType, useAppConfig } from "./config";
import { createEmptyMask, Mask } from "./mask";
-import { DEFAULT_INPUT_TEMPLATE, StoreKey } from "../constant";
+import {
+  DEFAULT_INPUT_TEMPLATE,
+  DEFAULT_SYSTEM_TEMPLATE,
+  StoreKey,
+} from "../constant";
import { api, RequestMessage } from "../client/api";
import { ChatControllerPool } from "../client/controller";
import { prettyObject } from "../utils/format";
@@ -279,7 +283,7 @@ export const useChatStore = create<ChatStore>()(
const modelConfig = session.mask.modelConfig;

const userContent = fillTemplateWith(content, modelConfig);
console.log("[User Input] fill with template: ", userContent);
console.log("[User Input] after template: ", userContent);

const userMessage: ChatMessage = createMessage({
role: "user",
@@ -312,7 +316,6 @@ export const useChatStore = create<ChatStore>()(
});

// make request
console.log("[User Input] ", sendMessages);
api.llm.chat({
messages: sendMessages,
config: { ...modelConfig, stream: true },
@@ -391,6 +394,27 @@ export const useChatStore = create<ChatStore>()(
// in-context prompts
const contextPrompts = session.mask.context.slice();

+// system prompts, injected to get closer to the OpenAI web ChatGPT behavior
+// only injected when the user is not using a mask or the mask has no context prompts
+const shouldInjectSystemPrompts = contextPrompts.length === 0;
+const systemPrompts = shouldInjectSystemPrompts
+  ? [
+      createMessage({
+        role: "system",
+        content: fillTemplateWith("", {
+          ...modelConfig,
+          template: DEFAULT_SYSTEM_TEMPLATE,
+        }),
+      }),
+    ]
+  : [];
+if (shouldInjectSystemPrompts) {
+  console.log(
+    "[Global System Prompt] ",
+    systemPrompts.at(0)?.content ?? "empty",
+  );
+}
+
// long term memory
const shouldSendLongTermMemory =
modelConfig.sendMemory &&
@@ -409,6 +433,7 @@ export const useChatStore = create<ChatStore>()(
);

// let's concat send messages, including 4 parts:
+// 0. system prompt: to get closer to the OpenAI web ChatGPT behavior
// 1. long term memory: summarized memory messages
// 2. pre-defined in-context prompts
// 3. short term memory: latest n messages
@@ -435,6 +460,7 @@ export const useChatStore = create<ChatStore>()(

// concat all messages
const recentMessages = [
+...systemPrompts,
...longTermMemoryPrompts,
...contextPrompts,
...reversedRecentMessages.reverse(),
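
Taken together, the chat.ts changes turn the request payload into a concatenation of four parts in a fixed order: the optional global system prompt, long term memory, pre-defined in-context prompts, and the short term message window. A minimal sketch of that ordering, with simplified message types standing in for the real ChatMessage:

// Illustrative sketch (simplified types) of the concatenation order this PR
// establishes; the real code builds these arrays from session and mask state.
type Role = "system" | "user" | "assistant";
interface SimpleMessage {
  role: Role;
  content: string;
}

function concatSendMessages(
  systemPrompts: SimpleMessage[],         // 0. global system prompt, only when no context prompts
  longTermMemoryPrompts: SimpleMessage[], // 1. summarized long term memory
  contextPrompts: SimpleMessage[],        // 2. pre-defined in-context prompts
  shortTermMemory: SimpleMessage[],       // 3. latest n messages, oldest first
): SimpleMessage[] {
  return [
    ...systemPrompts,
    ...longTermMemoryPrompts,
    ...contextPrompts,
    ...shortTermMemory,
  ];
}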
4 changes: 2 additions & 2 deletions app/store/config.ts
@@ -177,9 +177,9 @@ export const useAppConfig = create<ChatConfigStore>()(
}),
{
name: StoreKey.Config,
-version: 3.1,
+version: 3.2,
migrate(persistedState, version) {
-if (version === 3.1) return persistedState as any;
+if (version === 3.2) return persistedState as any;

const state = persistedState as ChatConfig;
state.modelConfig.sendMemory = true;
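
The config.ts change works through zustand's persist middleware: bumping version makes persist call migrate for any client whose stored state carries an older version number, and returning the state unchanged when it already matches the current version skips the patch-up. A minimal sketch of the pattern, with an illustrative store shape and key rather than the app's real ChatConfig:

// Illustrative sketch of the zustand persist versioning pattern used above.
// The store shape, key name, and defaults here are placeholders.
import { create } from "zustand";
import { persist } from "zustand/middleware";

interface DemoConfig {
  sendMemory: boolean;
  template: string;
}

const useDemoConfig = create<DemoConfig>()(
  persist(
    () => ({
      sendMemory: true,
      template: "{{input}}",
    }),
    {
      name: "demo-config",
      version: 3.2, // bumping this triggers migrate() for clients persisted on older versions
      migrate(persistedState, version) {
        // already current: keep the stored state as-is
        if (version === 3.2) return persistedState as any;
        // otherwise patch fields that older versions may be missing
        const state = persistedState as DemoConfig;
        state.sendMemory = state.sendMemory ?? true;
        return state as any;
      },
    },
  ),
);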