From bdd93f912f4e168aa51448f6dbff76f57fd311cb Mon Sep 17 00:00:00 2001
From: "chancel.yang"
Date: Mon, 26 Jun 2023 15:01:32 +0800
Subject: [PATCH 1/2] feat: limit the length of the message history

---
 src/providers/openai/handler.ts | 28 ++++++++++++++++++++++++++--
 src/providers/openai/index.ts   | 10 ++++++++++
 2 files changed, 36 insertions(+), 2 deletions(-)

diff --git a/src/providers/openai/handler.ts b/src/providers/openai/handler.ts
index ba7a216f..ec984a71 100644
--- a/src/providers/openai/handler.ts
+++ b/src/providers/openai/handler.ts
@@ -1,5 +1,6 @@
 import { fetchChatCompletion, fetchImageGeneration } from './api'
 import { parseStream } from './parser'
+import type { Message } from '@/types/message'
 import type { HandlerPayload, Provider } from '@/types/provider'
 
 export const handlePrompt: Provider['handlePrompt'] = async(payload, signal?: AbortSignal) => {
@@ -35,14 +36,37 @@ export const handleRapidPrompt: Provider['handleRapidPrompt'] = async(prompt, gl
 }
 
 const handleChatCompletion = async(payload: HandlerPayload, signal?: AbortSignal) => {
+  // An array to store the chat messages
+  const messages: Message[] = []
+
+  let maxTokens = payload.globalSettings.maxTokens as number
+  let messageHistorySize = payload.globalSettings.messageHistorySize as number
+
+  // Iterate through the message history
+  while (messageHistorySize > 0) {
+    messageHistorySize--
+    // Get the last message from the payload
+    const m = payload.messages.pop()
+    if (m === undefined)
+      break
+
+    // Subtract the length of the message from the available tokens
+    maxTokens -= m.content.length
+    if (maxTokens < 0)
+      break
+
+    // Add the message to the beginning of the messages array
+    messages.unshift(m)
+  }
+
   const response = await fetchChatCompletion({
     apiKey: payload.globalSettings.apiKey as string,
     baseUrl: (payload.globalSettings.baseUrl as string).trim().replace(/\/$/, ''),
     body: {
+      messages,
+      max_tokens: maxTokens,
       model: payload.globalSettings.model as string,
-      messages: payload.messages,
       temperature: payload.globalSettings.temperature as number,
-      max_tokens: payload.globalSettings.maxTokens as number,
       top_p: payload.globalSettings.topP as number,
       stream: payload.globalSettings.stream as boolean ?? true,
     },
diff --git a/src/providers/openai/index.ts b/src/providers/openai/index.ts
index 348609c4..e0017cf9 100644
--- a/src/providers/openai/index.ts
+++ b/src/providers/openai/index.ts
@@ -51,6 +51,16 @@ const providerOpenAI = () => {
       default: 2048,
       step: 1,
     },
+    {
+      key: 'messageHistorySize',
+      name: 'Max History Message Size',
+      description: 'The maximum number of historical messages to retain. Retained messages are further truncated when their combined length exceeds the Max Tokens parameter.',
+      type: 'slider',
+      min: 1,
+      max: 24,
+      default: 5,
+      step: 1,
+    },
     {
       key: 'temperature',
       name: 'Temperature',
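Note on why a follow-up fix is needed: in PATCH 1/2 the subtraction `maxTokens -= m.content.length` runs before the bounds check, so when a message overflows the remaining budget the loop breaks with `maxTokens` already negative, and that negative value is then sent to the API as `max_tokens`. A minimal sketch of the failure, using hypothetical numbers rather than anything from the codebase:

// Illustration of the PATCH 1/2 ordering with a made-up budget.
let maxTokens = 10
const content = 'x'.repeat(18) // one message longer than the whole budget

maxTokens -= content.length // maxTokens is now -8
if (maxTokens < 0) {
  // The history loop breaks here, but maxTokens stays at -8 and is
  // later passed to fetchChatCompletion as max_tokens, which is invalid.
}

PATCH 2/2 below reorders the check so the subtraction only happens once the message is known to fit.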
From 787e49299496fa86385b85aaf0a854c26fb6e2f6 Mon Sep 17 00:00:00 2001
From: chancel <38032874+chancelyg@users.noreply.github.com>
Date: Wed, 28 Jun 2023 10:09:39 +0800
Subject: [PATCH 2/2] fix: prevent negative values in the max_tokens calculation

Update handler.ts
---
 src/providers/openai/handler.ts | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/providers/openai/handler.ts b/src/providers/openai/handler.ts
index ec984a71..82785282 100644
--- a/src/providers/openai/handler.ts
+++ b/src/providers/openai/handler.ts
@@ -50,12 +50,10 @@ const handleChatCompletion = async(payload: HandlerPayload, signal?: AbortSignal
     if (m === undefined)
       break
 
-    // Subtract the length of the message from the available tokens
-    maxTokens -= m.content.length
-    if (maxTokens < 0)
+    if (maxTokens - m.content.length < 0)
       break
 
-    // Add the message to the beginning of the messages array
+    maxTokens -= m.content.length
     messages.unshift(m)
   }
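For reference, a minimal standalone sketch of the trimming logic as it stands after PATCH 2/2. The simplified `Message` interface and the `trimMessageHistory` name are illustrative only (the real `Message` type lives in '@/types/message'), and character count stands in for tokens exactly as in the patch:

// A self-contained sketch of the history trimming after PATCH 2/2.
interface Message {
  role: 'system' | 'user' | 'assistant'
  content: string
}

const trimMessageHistory = (
  history: Message[],
  messageHistorySize: number,
  maxTokens: number,
) => {
  const messages: Message[] = []
  // Copy the history so the caller's array is not mutated
  // (the patch itself pops payload.messages in place).
  const pending = [...history]

  // Walk the history newest-first, keeping at most messageHistorySize
  // messages and never letting the character budget go negative.
  while (messageHistorySize > 0) {
    messageHistorySize--
    const m = pending.pop()
    if (m === undefined)
      break

    // The PATCH 2/2 fix: check before subtracting.
    if (maxTokens - m.content.length < 0)
      break

    maxTokens -= m.content.length
    messages.unshift(m)
  }

  return { messages, remainingTokens: maxTokens }
}

// Example: three messages, a 5-message window, a 50-character budget.
const { messages, remainingTokens } = trimMessageHistory(
  [
    { role: 'user', content: 'Hello' },
    { role: 'assistant', content: 'Hi, how can I help?' },
    { role: 'user', content: 'Summarize our chat.' },
  ],
  5,
  50,
)
console.log(messages.length, remainingTokens) // 3 messages kept, 7 characters left

The sketch keeps the patch's truncation behavior but returns the remaining budget instead of leaving it in a mutated local, so a caller could still pass it on as max_tokens.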