From 3c2b344364823c340d9e93271839983f6a8c9144 Mon Sep 17 00:00:00 2001
From: chancel <38032874+chancelyg@users.noreply.github.com>
Date: Wed, 28 Jun 2023 17:36:56 +0800
Subject: [PATCH] feat: set the length of the historical message (#75)

---
 src/providers/openai/handler.ts | 26 ++++++++++++++++++++++++--
 src/providers/openai/index.ts   | 10 ++++++++++
 2 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/src/providers/openai/handler.ts b/src/providers/openai/handler.ts
index ba7a216f..82785282 100644
--- a/src/providers/openai/handler.ts
+++ b/src/providers/openai/handler.ts
@@ -1,5 +1,6 @@
 import { fetchChatCompletion, fetchImageGeneration } from './api'
 import { parseStream } from './parser'
+import type { Message } from '@/types/message'
 import type { HandlerPayload, Provider } from '@/types/provider'

 export const handlePrompt: Provider['handlePrompt'] = async(payload, signal?: AbortSignal) => {
@@ -35,14 +36,35 @@ export const handleRapidPrompt: Provider['handleRapidPrompt'] = async(prompt, gl
 }

 const handleChatCompletion = async(payload: HandlerPayload, signal?: AbortSignal) => {
+  // An array to store the chat messages
+  const messages: Message[] = []
+
+  let maxTokens = payload.globalSettings.maxTokens as number
+  let messageHistorySize = payload.globalSettings.messageHistorySize as number
+
+  // Iterate through the message history
+  while (messageHistorySize > 0) {
+    messageHistorySize--
+    // Get the last message from the payload
+    const m = payload.messages.pop()
+    if (m === undefined)
+      break
+
+    if (maxTokens - m.content.length < 0)
+      break
+
+    maxTokens -= m.content.length
+    messages.unshift(m)
+  }
+
   const response = await fetchChatCompletion({
     apiKey: payload.globalSettings.apiKey as string,
     baseUrl: (payload.globalSettings.baseUrl as string).trim().replace(/\/$/, ''),
     body: {
+      messages,
+      max_tokens: maxTokens,
       model: payload.globalSettings.model as string,
-      messages: payload.messages,
       temperature: payload.globalSettings.temperature as number,
-      max_tokens: payload.globalSettings.maxTokens as number,
       top_p: payload.globalSettings.topP as number,
       stream: payload.globalSettings.stream as boolean ?? true,
     },
diff --git a/src/providers/openai/index.ts b/src/providers/openai/index.ts
index 348609c4..e0017cf9 100644
--- a/src/providers/openai/index.ts
+++ b/src/providers/openai/index.ts
@@ -51,6 +51,16 @@ const providerOpenAI = () => {
       default: 2048,
       step: 1,
     },
+    {
+      key: 'messageHistorySize',
+      name: 'Max History Message Size',
+      description: 'The number of historical messages to retain. Retained messages are truncated further if their combined length exceeds the Max Tokens parameter.',
+      type: 'slider',
+      min: 1,
+      max: 24,
+      default: 5,
+      step: 1,
+    },
     {
       key: 'temperature',
       name: 'Temperature',
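
The trimming loop the patch adds pops from payload.messages in place, so the
caller's array is mutated, and it uses character count as a rough stand-in for
token count. Below is a minimal standalone sketch of the same logic, assuming
a Message shape of { role, content } (the real type lives in '@/types/message')
and a hypothetical trimMessages name:

    interface Message {
      role: string
      content: string
    }

    // Keep at most `historySize` of the newest messages whose combined
    // character length fits within `maxTokens`, then return them in
    // chronological order together with the leftover token budget.
    const trimMessages = (
      history: Message[],
      maxTokens: number,
      historySize: number,
    ) => {
      const messages: Message[] = []
      let budget = maxTokens

      // slice() copies, so the caller's history is left untouched; the
      // reversed copy is walked newest-first, mirroring the patch's pop().
      const recent = historySize > 0 ? history.slice(-historySize) : []
      for (const m of recent.reverse()) {
        if (budget - m.content.length < 0)
          break
        budget -= m.content.length
        messages.unshift(m)
      }

      return { messages, remainingTokens: budget }
    }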
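
Two behaviors of the patch are worth noting. Because each kept message's
length is subtracted from maxTokens and the remainder is forwarded to the API
as max_tokens, a long history directly shrinks the budget left for the
completion itself. And because the loop breaks at the first message that
overflows the budget, every older message is dropped as well; with the
slider's default of 5, at most the five newest messages are sent, fewer if
they are long.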