From 439c4a5552bdc1fcf33f58b82d1fce58359f0b82 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=87=8E=E5=A3=B0?=
Date: Thu, 9 Mar 2023 14:55:50 +0800
Subject: [PATCH] fix: count prompt token error

---
 legacy/src/chatgpt-api.ts | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/legacy/src/chatgpt-api.ts b/legacy/src/chatgpt-api.ts
index 16ef2c73..b5a15003 100644
--- a/legacy/src/chatgpt-api.ts
+++ b/legacy/src/chatgpt-api.ts
@@ -335,11 +335,9 @@ export class ChatGPTAPI {
     const systemMessageOffset = messages.length
     let nextMessages = messages.concat([
       {
-        ...{
-          role: 'user',
-          content: text,
-          name: opts.name
-        }
+        role: 'user',
+        content: text,
+        name: opts.name
       }
     ])
     let numTokens = 0
@@ -349,13 +347,13 @@
         .reduce((prompt, message) => {
           switch (message.role) {
             case 'system':
-              return [prompt, `Instructions:\n${message.content}`]
+              return [...prompt, `Instructions:\n${message.content}`]
             case 'user':
-              return [prompt, `${userLabel}:\n${message.content}`]
+              return [...prompt, `${userLabel}:\n${message.content}`]
             default:
-              return [prompt, `${assistantLabel}:\n${message.content}`]
+              return [...prompt, `${assistantLabel}:\n${message.content}`]
           }
-        }, [])
+        }, [] as string[])
         .join('\n\n')
 
       const nextNumTokensEstimate = await this._getTokenCount(prompt)