#477 Estimate token usage with streaming completions

pull/507/head
Oleksii Myronenko 2023-03-29 23:34:06 +03:00
parent 4613b95cc3
commit dd6b457932
2 changed files with 26 additions and 4 deletions

View file

@ -211,16 +211,17 @@ export class ChatGPTAPI {
result.id = response.id
}
if (response?.choices?.length) {
if (response.choices?.length) {
const delta = response.choices[0].delta
result.delta = delta.content
if (delta?.content) result.text += delta.content
result.detail = response
if (delta.role) {
result.role = delta.role
}
result.detail = response
onProgress?.(result)
}
} catch (err) {
@ -286,7 +287,16 @@ export class ChatGPTAPI {
}
}
}
).then((message) => {
).then(async (message) => {
if (message.detail && !message.detail.usage) {
const promptTokens = numTokens
const completionTokens = await this._getTokenCount(message.text)
message.detail.usage = {
prompt_tokens: promptTokens,
completion_tokens: completionTokens,
total_tokens: promptTokens + completionTokens
}
}
return this._upsertMessage(message).then(() => message)
})

View file

@ -59,13 +59,25 @@ export type SendMessageBrowserOptions = {
abortSignal?: AbortSignal
}
/**
 * A streamed chat-completion chunk augmented with a `usage` field.
 *
 * The streaming delta responses from the API do not carry usage data
 * themselves — this client fills it in after the stream completes
 * (see the `!message.detail.usage` check in the sendMessage `.then`
 * handler), so the counts are client-side estimates.
 */
interface CreateChatCompletionStreamResponse
extends openai.CreateChatCompletionDeltaResponse {
usage: CreateCompletionStreamResponseUsage
}
/**
 * Token-usage stats computed client-side for streaming completions.
 *
 * The `estimated: true` literal marks these counts as approximations
 * (derived via `_getTokenCount`) rather than figures reported by the API.
 */
interface CreateCompletionStreamResponseUsage
extends openai.CreateCompletionResponseUsage {
// NOTE(review): the usage object built for streamed messages sets only
// prompt/completion/total tokens — confirm callers also set `estimated`.
estimated: true
}
export interface ChatMessage {
id: string
text: string
role: Role
name?: string
delta?: string
detail?: any
detail?:
| openai.CreateChatCompletionResponse
| CreateChatCompletionStreamResponse
// relevant for both ChatGPTAPI and ChatGPTUnofficialProxyAPI
parentMessageId?: string