From 1a0b570dc98840b20e4d560692941aa2e4bae15b Mon Sep 17 00:00:00 2001
From: NoodleOfDeath
Date: Thu, 2 Mar 2023 13:19:45 -0500
Subject: [PATCH] feat: exports init options for external use

---
 src/chatgpt-api.ts | 59 +++++++++++++---------------------------------
 src/types.ts       | 30 +++++++++++++++++++++++
 2 files changed, 46 insertions(+), 43 deletions(-)

diff --git a/src/chatgpt-api.ts b/src/chatgpt-api.ts
index 53302cc..bce8184 100644
--- a/src/chatgpt-api.ts
+++ b/src/chatgpt-api.ts
@@ -7,6 +7,7 @@ import * as tokenizer from './tokenizer'
 import * as types from './types'
 import { fetch as globalFetch } from './fetch'
 import { fetchSSE } from './fetch-sse'
+import { ChatGPTAPIOptions } from './types'
 
 const CHATGPT_MODEL = 'gpt-3.5-turbo'
 
@@ -46,47 +47,19 @@ export class ChatGPTAPI {
    * @param upsertMessage - Optional function to insert or update a message. If not provided, the default implementation will be used (using an in-memory `messageStore`).
    * @param fetch - Optional override for the `fetch` implementation to use. Defaults to the global `fetch` function.
    */
-  constructor(opts: {
-    apiKey: string
-
-    /** @defaultValue `'https://api.openai.com'` **/
-    apiBaseUrl?: string
-
-    /** @defaultValue `false` **/
-    debug?: boolean
-
-    completionParams?: Partial<
-      Omit<openai.CreateChatCompletionRequest, 'messages' | 'n'>
-    >
-
-    systemMessage?: string
-
-    /** @defaultValue `4096` **/
-    maxModelTokens?: number
-
-    /** @defaultValue `1000` **/
-    maxResponseTokens?: number
-
-    messageStore?: Keyv
-    getMessageById?: types.GetMessageByIdFunction
-    upsertMessage?: types.UpsertMessageFunction
-
-    fetch?: types.FetchFn
-  }) {
-    const {
-      apiKey,
-      apiBaseUrl = 'https://api.openai.com',
-      debug = false,
-      messageStore,
-      completionParams,
-      systemMessage,
-      maxModelTokens = 4096,
-      maxResponseTokens = 1000,
-      getMessageById = this._defaultGetMessageById,
-      upsertMessage = this._defaultUpsertMessage,
-      fetch = globalFetch
-    } = opts
-
+  constructor({
+    apiKey,
+    apiBaseUrl = 'https://api.openai.com',
+    debug = false,
+    messageStore,
+    completionParams,
+    systemMessage,
+    maxModelTokens = 4096,
+    maxResponseTokens = 1000,
+    getMessageById,
+    upsertMessage,
+    fetch = globalFetch
+  }: ChatGPTAPIOptions) {
     this._apiKey = apiKey
     this._apiBaseUrl = apiBaseUrl
     this._debug = !!debug
@@ -110,8 +83,8 @@ export class ChatGPTAPI {
     this._maxModelTokens = maxModelTokens
     this._maxResponseTokens = maxResponseTokens
 
-    this._getMessageById = getMessageById
-    this._upsertMessage = upsertMessage
+    this._getMessageById = getMessageById ?? this._defaultGetMessageById
+    this._upsertMessage = upsertMessage ?? this._defaultUpsertMessage
 
     if (messageStore) {
       this._messageStore = messageStore
diff --git a/src/types.ts b/src/types.ts
index 23beb1f..f37223a 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,7 +1,37 @@
+import Keyv from 'keyv'
+
 export type Role = 'user' | 'assistant' | 'system'
 
 export type FetchFn = typeof fetch
 
+export type ChatGPTAPIOptions = {
+  apiKey: string
+
+  /** @defaultValue `'https://api.openai.com'` **/
+  apiBaseUrl?: string
+
+  /** @defaultValue `false` **/
+  debug?: boolean
+
+  completionParams?: Partial<
+    Omit<openai.CreateChatCompletionRequest, 'messages' | 'n'>
+  >
+
+  systemMessage?: string
+
+  /** @defaultValue `4096` **/
+  maxModelTokens?: number
+
+  /** @defaultValue `1000` **/
+  maxResponseTokens?: number
+
+  messageStore?: Keyv
+  getMessageById?: GetMessageByIdFunction
+  upsertMessage?: UpsertMessageFunction
+
+  fetch?: FetchFn
+}
+
 export type SendMessageOptions = {
   /** The name of a user in a multi-user chat. */
   name?: string
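
For context, a minimal sketch of how the newly exported type could be consumed downstream, assuming src/types.ts remains re-exported from the package entry point; the 'chatgpt' import path and the placeholder option values are illustrative and not part of this patch:

    import { ChatGPTAPI, ChatGPTAPIOptions } from 'chatgpt'

    // Init options can now be typed, built, and shared independently of the
    // constructor call, e.g. assembled from app config or reused in tests.
    const options: ChatGPTAPIOptions = {
      apiKey: '<OPENAI_API_KEY>', // placeholder; supply a real key at runtime
      maxModelTokens: 4096,
      maxResponseTokens: 1000
    }

    const api = new ChatGPTAPI(options)

Because getMessageById and upsertMessage are optional on ChatGPTAPIOptions and the constructor falls back to the in-memory defaults via ??, a caller building a bare options object does not need to supply them.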