From aaf5efc01bd91023ee79254938f848519b8d1dee Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 16:13:11 -0600 Subject: [PATCH 1/8] feat: WIP add support for native fetch; undici on node.js < 18, and refactor conversation support --- package.json | 10 ++-- pnpm-lock.yaml | 102 +++++++++------------------------ src/chatgpt-api.test.ts | 2 +- src/chatgpt-api.ts | 106 ++++++++--------------------------- src/chatgpt-conversation.ts | 71 +++++++++++++++++++++++ src/demo-conversation.ts | 4 +- src/demo.ts | 2 +- src/fetch-sse.ts | 19 ++----- src/fetch.ts | 10 +++- src/index.ts | 1 + src/stream-async-iterable.ts | 4 +- tsup.config.ts | 40 +++++++++---- 12 files changed, 174 insertions(+), 197 deletions(-) create mode 100644 src/chatgpt-conversation.ts diff --git a/package.json b/package.json index aac5024..d27c553 100644 --- a/package.json +++ b/package.json @@ -19,13 +19,14 @@ "build" ], "engines": { - "node": ">=14" + "node": ">=16.8" }, "scripts": { "build": "tsup", "dev": "tsup --watch", "clean": "del build", "prebuild": "run-s clean", + "postbuild": "sed -i 's/ *\\?\\? 
*(await import(\"undici\")).fetch//' build/browser/index.js", "predev": "run-s clean", "pretest": "run-s build", "docs": "typedoc", @@ -38,7 +39,6 @@ "dependencies": { "eventsource-parser": "^0.0.5", "expiry-map": "^2.0.0", - "node-fetch": "2", "remark": "^14.0.2", "strip-markdown": "^5.0.0", "uuid": "^9.0.0" @@ -46,7 +46,6 @@ "devDependencies": { "@trivago/prettier-plugin-sort-imports": "^4.0.0", "@types/node": "^18.11.9", - "@types/node-fetch": "2", "@types/uuid": "^9.0.0", "ava": "^5.1.0", "del-cli": "^5.0.0", @@ -89,5 +88,8 @@ "ai", "ml", "bot" - ] + ], + "optionalDependencies": { + "undici": "^5.13.0" + } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 24fc4dc..4ddaac7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -3,7 +3,6 @@ lockfileVersion: 5.4 specifiers: '@trivago/prettier-plugin-sort-imports': ^4.0.0 '@types/node': ^18.11.9 - '@types/node-fetch': '2' '@types/uuid': ^9.0.0 ava: ^5.1.0 del-cli: ^5.0.0 @@ -12,7 +11,6 @@ specifiers: expiry-map: ^2.0.0 husky: ^8.0.2 lint-staged: ^13.0.3 - node-fetch: '2' npm-run-all: ^4.1.5 ora: ^6.1.2 prettier: ^2.8.0 @@ -23,20 +21,22 @@ specifiers: typedoc: ^0.23.21 typedoc-plugin-markdown: ^3.13.6 typescript: ^4.9.3 + undici: ^5.13.0 uuid: ^9.0.0 dependencies: eventsource-parser: 0.0.5 expiry-map: 2.0.0 - node-fetch: 2.6.7 remark: 14.0.2 strip-markdown: 5.0.0 uuid: 9.0.0 +optionalDependencies: + undici: 5.13.0 + devDependencies: '@trivago/prettier-plugin-sort-imports': 4.0.0_prettier@2.8.0 '@types/node': 18.11.10 - '@types/node-fetch': 2.6.2 '@types/uuid': 9.0.0 ava: 5.1.0 del-cli: 5.0.0 @@ -434,13 +434,6 @@ packages: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} dev: false - /@types/node-fetch/2.6.2: - resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==} - dependencies: - '@types/node': 18.11.10 - form-data: 3.0.1 - dev: true - /@types/node/18.11.10: resolution: 
{integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==} dev: true @@ -568,10 +561,6 @@ packages: engines: {node: '>=8'} dev: true - /asynckit/0.4.0: - resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} - dev: true - /ava/5.1.0: resolution: {integrity: sha512-e5VFrSQ0WBPyZJWRXVrO7RFOizFeNM0t2PORwrPvWtApgkORI6cvGnY3GX1G+lzpd0HjqNx5Jus22AhxVnUMNA==} engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'} @@ -712,6 +701,14 @@ packages: load-tsconfig: 0.2.3 dev: true + /busboy/1.6.0: + resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==} + engines: {node: '>=10.16.0'} + dependencies: + streamsearch: 1.1.0 + dev: false + optional: true + /cac/6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} @@ -899,13 +896,6 @@ packages: resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} dev: true - /combined-stream/1.0.8: - resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==} - engines: {node: '>= 0.8'} - dependencies: - delayed-stream: 1.0.0 - dev: true - /commander/4.1.1: resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} engines: {node: '>= 6'} @@ -1053,11 +1043,6 @@ packages: slash: 4.0.0 dev: true - /delayed-stream/1.0.0: - resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==} - engines: {node: '>=0.4.0'} - dev: true - /dequal/2.0.3: resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} engines: {node: '>=6'} @@ -1491,15 +1476,6 @@ packages: path-exists: 5.0.0 
dev: true - /form-data/3.0.1: - resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==} - engines: {node: '>= 6'} - dependencies: - asynckit: 0.4.0 - combined-stream: 1.0.8 - mime-types: 2.1.35 - dev: true - /fs.realpath/1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true @@ -2426,18 +2402,6 @@ packages: picomatch: 2.3.1 dev: true - /mime-db/1.52.0: - resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} - engines: {node: '>= 0.6'} - dev: true - - /mime-types/2.1.35: - resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} - engines: {node: '>= 0.6'} - dependencies: - mime-db: 1.52.0 - dev: true - /mimic-fn/2.1.0: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} engines: {node: '>=6'} @@ -2507,18 +2471,6 @@ packages: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true - /node-fetch/2.6.7: - resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==} - engines: {node: 4.x || >=6.0.0} - peerDependencies: - encoding: ^0.1.0 - peerDependenciesMeta: - encoding: - optional: true - dependencies: - whatwg-url: 5.0.0 - dev: false - /node-releases/2.0.6: resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} dev: true @@ -3210,6 +3162,12 @@ packages: escape-string-regexp: 2.0.0 dev: true + /streamsearch/1.1.0: + resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} + engines: {node: '>=10.0.0'} + dev: false + optional: true + /string-argv/0.3.1: resolution: 
{integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==} engines: {node: '>=0.6.19'} @@ -3382,10 +3340,6 @@ packages: is-number: 7.0.0 dev: true - /tr46/0.0.3: - resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} - dev: false - /tr46/1.0.1: resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} dependencies: @@ -3522,6 +3476,15 @@ packages: which-boxed-primitive: 1.0.2 dev: true + /undici/5.13.0: + resolution: {integrity: sha512-UDZKtwb2k7KRsK4SdXWG7ErXiL7yTGgLWvk2AXO1JMjgjh404nFo6tWSCM2xMpJwMPx3J8i/vfqEh1zOqvj82Q==} + engines: {node: '>=12.18'} + requiresBuild: true + dependencies: + busboy: 1.6.0 + dev: false + optional: true + /unified/10.1.2: resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==} dependencies: @@ -3627,10 +3590,6 @@ packages: defaults: 1.0.4 dev: true - /webidl-conversions/3.0.1: - resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - dev: false - /webidl-conversions/4.0.2: resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} dev: true @@ -3640,13 +3599,6 @@ packages: engines: {node: '>=6'} dev: true - /whatwg-url/5.0.0: - resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==} - dependencies: - tr46: 0.0.3 - webidl-conversions: 3.0.1 - dev: false - /whatwg-url/7.1.0: resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} dependencies: diff --git a/src/chatgpt-api.test.ts b/src/chatgpt-api.test.ts index ea0bd18..090afba 100644 --- a/src/chatgpt-api.test.ts +++ b/src/chatgpt-api.test.ts @@ -63,7 +63,7 @@ if (!isCI) { }, { message: - 
'ChatGPT failed to refresh auth token. Error: session token has expired' + 'ChatGPT failed to refresh auth token. Error: session token may have expired' } ) }) diff --git a/src/chatgpt-api.ts b/src/chatgpt-api.ts index 32a39a1..6db89bf 100644 --- a/src/chatgpt-api.ts +++ b/src/chatgpt-api.ts @@ -2,6 +2,7 @@ import ExpiryMap from 'expiry-map' import { v4 as uuidv4 } from 'uuid' import * as types from './types' +import { ChatGPTConversation } from './chatgpt-conversation' import { fetch } from './fetch' import { fetchSSE } from './fetch-sse' import { markdownToText } from './utils' @@ -10,80 +11,10 @@ const KEY_ACCESS_TOKEN = 'accessToken' const USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36' -/** - * A conversation wrapper around the ChatGPT API. This allows you to send - * multiple messages to ChatGPT and receive responses, without having to - * manually pass the conversation ID and parent message ID for each message. - */ -class Conversation { - api: ChatGPTAPI - conversationId: string = undefined - parentMessageId: string = undefined - - /** - * Creates a new conversation wrapper around the ChatGPT API. - * @param api - The ChatGPT API instance to use. - */ - constructor( - api: ChatGPTAPI, - opts: { conversationId?: string; parentMessageId?: string } = {} - ) { - this.api = api - this.conversationId = opts.conversationId - this.parentMessageId = opts.parentMessageId - } - - /** - * Sends a message to ChatGPT, waits for the response to resolve, and returns - * the response. - * If this is the first message in the conversation, the conversation ID and - * parent message ID will be automatically set. - * This allows you to send multiple messages to ChatGPT and receive responses, - * without having to manually pass the conversation ID and parent message ID - * for each message. 
- * If you want to manually pass the conversation ID and parent message ID, - * use `api.sendMessage` instead. - * - * @param message - The plaintext message to send. - * @param opts.onProgress - Optional listener which will be called every time the partial response is updated - * @param opts.onConversationResponse - Optional listener which will be called every time a conversation response is received - * @returns The plaintext response from ChatGPT. - */ - async sendMessage( - message: string, - opts: { - onProgress?: (partialResponse: string) => void - onConversationResponse?: ( - response: types.ConversationResponseEvent - ) => void - } = {} - ) { - const { onProgress, onConversationResponse } = opts - if (!this.conversationId) { - return this.api.sendMessage(message, { - onProgress, - onConversationResponse: (response) => { - this.conversationId = response.conversation_id - this.parentMessageId = response.message.id - onConversationResponse?.(response) - } - }) - } - - return this.api.sendMessage(message, { - conversationId: this.conversationId, - parentMessageId: this.parentMessageId, - onProgress, - onConversationResponse: (response) => { - this.conversationId = response.conversation_id - this.parentMessageId = response.message.id - onConversationResponse?.(response) - } - }) - } -} - export class ChatGPTAPI { + public conversationId: string = undefined + public parentMessageId: string = undefined + protected _sessionToken: string protected _markdown: boolean protected _apiBaseUrl: string @@ -152,10 +83,13 @@ export class ChatGPTAPI { * Sends a message to ChatGPT, waits for the response to resolve, and returns * the response. * - * @param message - The plaintext message to send. 
- * @param opts.conversationId - Optional ID of the previous message in a conversation - * @param opts.onProgress - Optional listener which will be called every time the partial response is updated - * @param opts.onConversationResponse - Optional listener which will be called every time the partial response is updated with the full conversation response + * @param message - The prompt message to send + * @param opts.conversationId - Optional ID of a conversation to continue + * @param opts.parentMessageId - Optional ID of the previous message in the conversation + * @param opts.onProgress - Optional function which will be called every time the partial response is updated + * @param opts.onConversationResponse - Optional function which will be called every time the partial response is updated with the full conversation response + * @param opts.abortSignal - Optional function used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) + * @returns The response from ChatGPT */ async sendMessage( message: string, @@ -166,13 +100,15 @@ export class ChatGPTAPI { onConversationResponse?: ( response: types.ConversationResponseEvent ) => void + abortSignal?: AbortSignal } = {} ): Promise { const { conversationId, parentMessageId = uuidv4(), onProgress, - onConversationResponse + onConversationResponse, + abortSignal } = opts const accessToken = await this.refreshAccessToken() @@ -214,6 +150,7 @@ export class ChatGPTAPI { 'user-agent': this._userAgent }, body: JSON.stringify(body), + signal: abortSignal, onMessage: (data: string) => { if (data === '[DONE]') { return resolve(response) @@ -275,7 +212,7 @@ export class ChatGPTAPI { const error = res?.error if (error) { if (error === 'RefreshAccessTokenError') { - throw new Error('session token has expired') + throw new Error('session token may have expired') } else { throw new Error(error) } @@ -289,15 +226,16 @@ export class ChatGPTAPI { } /** - * Get 
a new Conversation instance, which can be used to send multiple messages as part of a single conversation. + * Gets a new ChatGPTConversation instance, which can be used to send multiple + * messages as part of a single conversation. * - * @param opts.conversationId - Optional Data of the previous message in a conversation - * @param opts.parentMessageId - Optional Data of the previous message in a conversation - * @returns a new Conversation instance + * @param opts.conversationId - Optional ID of the previous message in a conversation + * @param opts.parentMessageId - Optional ID of the previous message in a conversation + * @returns The new conversation instance */ getConversation( opts: { conversationId?: string; parentMessageId?: string } = {} ) { - return new Conversation(this, opts) + return new ChatGPTConversation(this, opts) } } diff --git a/src/chatgpt-conversation.ts b/src/chatgpt-conversation.ts new file mode 100644 index 0000000..f84cae2 --- /dev/null +++ b/src/chatgpt-conversation.ts @@ -0,0 +1,71 @@ +import * as types from './types' +import { type ChatGPTAPI } from './chatgpt-api' + +/** + * A conversation wrapper around the ChatGPTAPI. This allows you to send + * multiple messages to ChatGPT and receive responses, without having to + * manually pass the conversation ID and parent message ID for each message. + */ +export class ChatGPTConversation { + api: ChatGPTAPI + conversationId: string = undefined + parentMessageId: string = undefined + + /** + * Creates a new conversation wrapper around the ChatGPT API. 
+ * + * @param api - The ChatGPT API instance to use + * @param opts.conversationId - Optional ID of a conversation to continue + * @param opts.parentMessageId - Optional ID of the previous message in the conversation + */ + constructor( + api: ChatGPTAPI, + opts: { conversationId?: string; parentMessageId?: string } = {} + ) { + this.api = api + this.conversationId = opts.conversationId + this.parentMessageId = opts.parentMessageId + } + + /** + * Sends a message to ChatGPT, waits for the response to resolve, and returns + * the response. + * + * If this is the first message in the conversation, the conversation ID and + * parent message ID will be automatically set. + * + * This allows you to send multiple messages to ChatGPT and receive responses, + * without having to manually pass the conversation ID and parent message ID + * for each message. + * + * @param message - The prompt message to send + * @param opts.onProgress - Optional listener which will be called every time the partial response is updated + * @param opts.onConversationResponse - Optional listener which will be called every time a conversation response is received + * @returns The response from ChatGPT + */ + async sendMessage( + message: string, + opts: { + onProgress?: (partialResponse: string) => void + onConversationResponse?: ( + response: types.ConversationResponseEvent + ) => void + } = {} + ): Promise { + const { onProgress, onConversationResponse } = opts + + return this.api.sendMessage(message, { + conversationId: this.conversationId, + parentMessageId: this.parentMessageId, + onProgress, + onConversationResponse: (response) => { + this.conversationId = response.conversation_id + this.parentMessageId = response.message.id + + if (onConversationResponse) { + return onConversationResponse(response) + } + } + }) + } +} diff --git a/src/demo-conversation.ts b/src/demo-conversation.ts index c0199b8..915f39b 100644 --- a/src/demo-conversation.ts +++ b/src/demo-conversation.ts @@ -6,10 +6,10 
@@ import { ChatGPTAPI } from '.' dotenv.config() /** - * Example CLI for testing functionality. + * Demo CLI for testing conversation support. * * ``` - * npx tsx src/demo.ts + * npx tsx src/demo-conversation.ts * ``` */ async function main() { diff --git a/src/demo.ts b/src/demo.ts index 7bae4db..dcfd028 100644 --- a/src/demo.ts +++ b/src/demo.ts @@ -6,7 +6,7 @@ import { ChatGPTAPI } from '.' dotenv.config() /** - * Example CLI for testing functionality. + * Demo CLI for testing basic functionality. * * ``` * npx tsx src/demo.ts diff --git a/src/fetch-sse.ts b/src/fetch-sse.ts index 3a0b585..15f2a54 100644 --- a/src/fetch-sse.ts +++ b/src/fetch-sse.ts @@ -1,8 +1,7 @@ import { createParser } from 'eventsource-parser' import { fetch } from './fetch' - -// import { streamAsyncIterable } from './stream-async-iterable' +import { streamAsyncIterable } from './stream-async-iterable' export async function fetchSSE( url: string, @@ -16,16 +15,8 @@ export async function fetchSSE( } }) - resp.body.on('readable', () => { - let chunk: string | Buffer - while (null !== (chunk = resp.body.read())) { - parser.feed(chunk.toString()) - } - }) - - // TODO: add support for web-compatible `fetch` - // for await (const chunk of streamAsyncIterable(resp.body)) { - // const str = new TextDecoder().decode(chunk) - // parser.feed(str) - // } + for await (const chunk of streamAsyncIterable(resp.body)) { + const str = new TextDecoder().decode(chunk) + parser.feed(str) + } } diff --git a/src/fetch.ts b/src/fetch.ts index fe65965..308761c 100644 --- a/src/fetch.ts +++ b/src/fetch.ts @@ -1,3 +1,11 @@ -import fetch from 'node-fetch' +/// + +// Use `undici` for node.js 16 and 17 +// Use `fetch` for node.js >= 18 +// Use `fetch` for browsers +// Use `fetch` for all other environments +const fetch = + globalThis.fetch ?? 
+ ((await import('undici')).fetch as unknown as typeof globalThis.fetch) export { fetch } diff --git a/src/index.ts b/src/index.ts index 451712e..ed6a4b5 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,3 +1,4 @@ export * from './chatgpt-api' +export * from './chatgpt-conversation' export * from './types' export * from './utils' diff --git a/src/stream-async-iterable.ts b/src/stream-async-iterable.ts index fbfe174..78eb497 100644 --- a/src/stream-async-iterable.ts +++ b/src/stream-async-iterable.ts @@ -1,6 +1,4 @@ -import { type ReadableStream } from 'stream/web' - -export async function* streamAsyncIterable(stream: ReadableStream) { +export async function* streamAsyncIterable(stream: ReadableStream) { const reader = stream.getReader() try { while (true) { diff --git a/tsup.config.ts b/tsup.config.ts index 5dedfd7..321fadf 100644 --- a/tsup.config.ts +++ b/tsup.config.ts @@ -1,14 +1,30 @@ import { defineConfig } from 'tsup' -export default defineConfig({ - entry: ['src/index.ts'], - outDir: 'build', - target: 'node14', - platform: 'node', - format: ['esm'], - splitting: false, - sourcemap: true, - minify: true, - shims: false, - dts: true -}) +export default defineConfig([ + { + entry: ['src/index.ts'], + outDir: 'build', + target: 'node16', + platform: 'node', + format: ['esm'], + splitting: false, + sourcemap: true, + minify: false, + shims: true, + dts: true, + external: ['undici'] + }, + { + entry: ['src/index.ts'], + outDir: 'build/browser', + target: 'chrome89', + platform: 'browser', + format: ['esm'], + splitting: false, + sourcemap: true, + minify: false, + shims: true, + dts: true, + external: ['undici'] + } +]) From ec49713843136336f278d8b37105fa8158cff156 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 16:44:25 -0600 Subject: [PATCH 2/8] chore: remove support for node.js 14 from CI --- .github/workflows/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 
168526b..73af79e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,7 +12,6 @@ jobs: node-version: - 18 - 16 - - 14 steps: - name: Checkout From c8b3ac7029ce00a9819ac0db9c868bd9f5bcf4b2 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 18:19:30 -0600 Subject: [PATCH 3/8] fix: various fixes and doc updates --- package.json | 3 ++- readme.md | 14 ++++++++++++++ src/chatgpt-api.ts | 29 +++++++++++++---------------- src/chatgpt-conversation.ts | 26 ++++++++++++++------------ src/fetch-sse.ts | 8 ++++++-- src/types.ts | 13 +++++++++++++ 6 files changed, 62 insertions(+), 31 deletions(-) diff --git a/package.json b/package.json index d27c553..3094656 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,7 @@ "types": "./build/index.d.ts", "exports": { ".": { + "browser": "./build/browser/index.js", "import": "./build/index.js", "types": "./build/index.d.ts", "default": "./build/index.js" @@ -26,7 +27,7 @@ "dev": "tsup --watch", "clean": "del build", "prebuild": "run-s clean", - "postbuild": "sed -i 's/ *\\?\\? *(await import(\"undici\")).fetch//' build/browser/index.js", + "postbuild": "[ -n CI ] && sed -i '' 's/ *\\?\\? *(await import(\"undici\")).fetch//' build/browser/index.js || echo 'skipping postbuild on CI'", "predev": "run-s clean", "pretest": "run-s build", "docs": "typedoc", diff --git a/readme.md b/readme.md index ed4cd90..982ef57 100644 --- a/readme.md +++ b/readme.md @@ -13,6 +13,7 @@ - [Usage](#usage) - [Docs](#docs) - [How it works](#how-it-works) +- [Compatibility](#compatibility) - [Examples](#examples) - [Credit](#credit) - [License](#license) @@ -114,6 +115,19 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l > **Note** > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). 
+## Compatibility + +This package is ESM-only. It supports: + +- Node.js >= 16.8 + - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0) + - If you need CommonJS support, use [`v1.3.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.3.0) +- Edge runtimes like CF workers and Vercel edge functions +- Modern browsers + - This is mainly intended for chrome extensions where your code is protected to a degree + - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token + - If you want to build a website with `chatgpt`, we recommend using it only from your backend API + ## Examples All of these awesome projects are built using the `chatgpt` package. 🤯 diff --git a/src/chatgpt-api.ts b/src/chatgpt-api.ts index 6db89bf..bca3515 100644 --- a/src/chatgpt-api.ts +++ b/src/chatgpt-api.ts @@ -12,9 +12,6 @@ const USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36' export class ChatGPTAPI { - public conversationId: string = undefined - public parentMessageId: string = undefined - protected _sessionToken: string protected _markdown: boolean protected _apiBaseUrl: string @@ -86,22 +83,15 @@ export class ChatGPTAPI { * @param message - The prompt message to send * @param opts.conversationId - Optional ID of a conversation to continue * @param opts.parentMessageId - Optional ID of the previous message in the conversation - * @param opts.onProgress - Optional function which will be called every time the partial response is updated - * @param opts.onConversationResponse - Optional function which will be called every time the partial response is updated with the full conversation response - * @param opts.abortSignal - Optional function used to abort the underlying `fetch` call using an 
[AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) + * @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated + * @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response + * @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) + * * @returns The response from ChatGPT */ async sendMessage( message: string, - opts: { - conversationId?: string - parentMessageId?: string - onProgress?: (partialResponse: string) => void - onConversationResponse?: ( - response: types.ConversationResponseEvent - ) => void - abortSignal?: AbortSignal - } = {} + opts: types.SendMessageOptions = {} ): Promise { const { conversationId, @@ -161,6 +151,7 @@ export class ChatGPTAPI { if (onConversationResponse) { onConversationResponse(parsedData) } + const message = parsedData.message // console.log('event', JSON.stringify(parsedData, null, 2)) @@ -201,7 +192,13 @@ export class ChatGPTAPI { cookie: `__Secure-next-auth.session-token=${this._sessionToken}`, 'user-agent': this._userAgent } - }).then((r) => r.json() as any as types.SessionResult) + }).then((r) => { + if (!r.ok) { + throw new Error(`${r.status} ${r.statusText}`) + } + + return r.json() as any as types.SessionResult + }) const accessToken = res?.accessToken diff --git a/src/chatgpt-conversation.ts b/src/chatgpt-conversation.ts index f84cae2..972917a 100644 --- a/src/chatgpt-conversation.ts +++ b/src/chatgpt-conversation.ts @@ -39,28 +39,30 @@ export class ChatGPTConversation { * for each message. 
* * @param message - The prompt message to send - * @param opts.onProgress - Optional listener which will be called every time the partial response is updated - * @param opts.onConversationResponse - Optional listener which will be called every time a conversation response is received + * @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated + * @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response + * @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) + * * @returns The response from ChatGPT */ async sendMessage( message: string, - opts: { - onProgress?: (partialResponse: string) => void - onConversationResponse?: ( - response: types.ConversationResponseEvent - ) => void - } = {} + opts: types.SendConversationMessageOptions = {} ): Promise { - const { onProgress, onConversationResponse } = opts + const { onConversationResponse, ...rest } = opts return this.api.sendMessage(message, { + ...rest, conversationId: this.conversationId, parentMessageId: this.parentMessageId, - onProgress, onConversationResponse: (response) => { - this.conversationId = response.conversation_id - this.parentMessageId = response.message.id + if (response.conversation_id) { + this.conversationId = response.conversation_id + } + + if (response.message?.id) { + this.parentMessageId = response.message.id + } if (onConversationResponse) { return onConversationResponse(response) diff --git a/src/fetch-sse.ts b/src/fetch-sse.ts index 15f2a54..705480c 100644 --- a/src/fetch-sse.ts +++ b/src/fetch-sse.ts @@ -8,14 +8,18 @@ export async function fetchSSE( options: Parameters[1] & { onMessage: (data: string) => void } ) { const { onMessage, ...fetchOptions } = options - const resp = await fetch(url, fetchOptions) + const 
res = await fetch(url, fetchOptions) + if (!res.ok) { + throw new Error(`ChatGPTAPI error ${res.status || res.statusText}`) + } + const parser = createParser((event) => { if (event.type === 'event') { onMessage(event.data) } }) - for await (const chunk of streamAsyncIterable(resp.body)) { + for await (const chunk of streamAsyncIterable(res.body)) { const str = new TextDecoder().decode(chunk) parser.feed(str) } diff --git a/src/types.ts b/src/types.ts index b26da3b..80e7004 100644 --- a/src/types.ts +++ b/src/types.ts @@ -273,3 +273,16 @@ export type MessageContent = { } export type MessageMetadata = any + +export type SendMessageOptions = { + conversationId?: string + parentMessageId?: string + onProgress?: (partialResponse: string) => void + onConversationResponse?: (response: ConversationResponseEvent) => void + abortSignal?: AbortSignal +} + +export type SendConversationMessageOptions = Omit< + SendMessageOptions, + 'conversationId' | 'parentMessageId' +> From 52ae3679ec194cb6b2ea18adefaa20c68bf1a8db Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 18:19:50 -0600 Subject: [PATCH 4/8] docs: update auto-generated docs --- docs/classes/ChatGPTAPI.md | 46 +++++++++--- docs/classes/ChatGPTConversation.md | 107 ++++++++++++++++++++++++++++ docs/modules.md | 75 +++++++++++++------ docs/readme.md | 19 ++++- 4 files changed, 216 insertions(+), 31 deletions(-) create mode 100644 docs/classes/ChatGPTConversation.md diff --git a/docs/classes/ChatGPTAPI.md b/docs/classes/ChatGPTAPI.md index 35c6b7a..61d3b44 100644 --- a/docs/classes/ChatGPTAPI.md +++ b/docs/classes/ChatGPTAPI.md @@ -11,6 +11,7 @@ ### Methods - [ensureAuth](ChatGPTAPI.md#ensureauth) +- [getConversation](ChatGPTAPI.md#getconversation) - [getIsAuthenticated](ChatGPTAPI.md#getisauthenticated) - [refreshAccessToken](ChatGPTAPI.md#refreshaccesstoken) - [sendMessage](ChatGPTAPI.md#sendmessage) @@ -36,7 +37,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API. 
#### Defined in -[chatgpt-api.ts:31](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L31) +[chatgpt-api.ts:32](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L32) ## Methods @@ -50,7 +51,34 @@ Creates a new client wrapper around the unofficial ChatGPT REST API. #### Defined in -[chatgpt-api.ts:74](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L74) +[chatgpt-api.ts:75](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L75) + +___ + +### getConversation + +▸ **getConversation**(`opts?`): [`ChatGPTConversation`](ChatGPTConversation.md) + +Gets a new ChatGPTConversation instance, which can be used to send multiple +messages as part of a single conversation. + +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `opts` | `Object` | - | +| `opts.conversationId?` | `string` | Optional ID of the previous message in a conversation | +| `opts.parentMessageId?` | `string` | Optional ID of the previous message in a conversation | + +#### Returns + +[`ChatGPTConversation`](ChatGPTConversation.md) + +The new conversation instance + +#### Defined in + +[chatgpt-api.ts:233](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L233) ___ @@ -64,7 +92,7 @@ ___ #### Defined in -[chatgpt-api.ts:65](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L65) +[chatgpt-api.ts:66](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L66) ___ @@ -78,7 +106,7 @@ ___ #### Defined in -[chatgpt-api.ts:165](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L165) +[chatgpt-api.ts:183](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L183) ___ @@ -93,15 +121,15 @@ the response. 
| Name | Type | Description | | :------ | :------ | :------ | -| `message` | `string` | The plaintext message to send. | -| `opts` | `Object` | - | -| `opts.conversationId?` | `string` | Optional ID of the previous message in a conversation | -| `opts.onProgress?` | (`partialResponse`: `string`) => `void` | - | +| `message` | `string` | The prompt message to send | +| `opts` | [`SendMessageOptions`](../modules.md#sendmessageoptions) | - | #### Returns `Promise`<`string`\> +The response from ChatGPT + #### Defined in -[chatgpt-api.ts:86](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L86) +[chatgpt-api.ts:92](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L92) diff --git a/docs/classes/ChatGPTConversation.md b/docs/classes/ChatGPTConversation.md new file mode 100644 index 0000000..5c7b81b --- /dev/null +++ b/docs/classes/ChatGPTConversation.md @@ -0,0 +1,107 @@ +[chatgpt](../readme.md) / [Exports](../modules.md) / ChatGPTConversation + +# Class: ChatGPTConversation + +A conversation wrapper around the ChatGPTAPI. This allows you to send +multiple messages to ChatGPT and receive responses, without having to +manually pass the conversation ID and parent message ID for each message. + +## Table of contents + +### Constructors + +- [constructor](ChatGPTConversation.md#constructor) + +### Properties + +- [api](ChatGPTConversation.md#api) +- [conversationId](ChatGPTConversation.md#conversationid) +- [parentMessageId](ChatGPTConversation.md#parentmessageid) + +### Methods + +- [sendMessage](ChatGPTConversation.md#sendmessage) + +## Constructors + +### constructor + +• **new ChatGPTConversation**(`api`, `opts?`) + +Creates a new conversation wrapper around the ChatGPT API. 
+ +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `api` | [`ChatGPTAPI`](ChatGPTAPI.md) | The ChatGPT API instance to use | +| `opts` | `Object` | - | +| `opts.conversationId?` | `string` | Optional ID of a conversation to continue | +| `opts.parentMessageId?` | `string` | Optional ID of the previous message in the conversation | + +#### Defined in + +[chatgpt-conversation.ts:21](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L21) + +## Properties + +### api + +• **api**: [`ChatGPTAPI`](ChatGPTAPI.md) + +#### Defined in + +[chatgpt-conversation.ts:10](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L10) + +___ + +### conversationId + +• **conversationId**: `string` = `undefined` + +#### Defined in + +[chatgpt-conversation.ts:11](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L11) + +___ + +### parentMessageId + +• **parentMessageId**: `string` = `undefined` + +#### Defined in + +[chatgpt-conversation.ts:12](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L12) + +## Methods + +### sendMessage + +▸ **sendMessage**(`message`, `opts?`): `Promise`<`string`\> + +Sends a message to ChatGPT, waits for the response to resolve, and returns +the response. + +If this is the first message in the conversation, the conversation ID and +parent message ID will be automatically set. + +This allows you to send multiple messages to ChatGPT and receive responses, +without having to manually pass the conversation ID and parent message ID +for each message. 
+ +#### Parameters + +| Name | Type | Description | +| :------ | :------ | :------ | +| `message` | `string` | The prompt message to send | +| `opts` | [`SendConversationMessageOptions`](../modules.md#sendconversationmessageoptions) | - | + +#### Returns + +`Promise`<`string`\> + +The response from ChatGPT + +#### Defined in + +[chatgpt-conversation.ts:48](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L48) diff --git a/docs/modules.md b/docs/modules.md index 9b0f344..c9c4e03 100644 --- a/docs/modules.md +++ b/docs/modules.md @@ -7,6 +7,7 @@ ### Classes - [ChatGPTAPI](classes/ChatGPTAPI.md) +- [ChatGPTConversation](classes/ChatGPTConversation.md) ### Type Aliases @@ -28,6 +29,8 @@ - [Prompt](modules.md#prompt) - [PromptContent](modules.md#promptcontent) - [Role](modules.md#role) +- [SendConversationMessageOptions](modules.md#sendconversationmessageoptions) +- [SendMessageOptions](modules.md#sendmessageoptions) - [SessionResult](modules.md#sessionresult) - [User](modules.md#user) @@ -43,7 +46,7 @@ #### Defined in -[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L109) +[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L109) ___ @@ -53,7 +56,7 @@ ___ #### Defined in -[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L1) +[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L1) ___ @@ -75,7 +78,7 @@ https://chat.openapi.com/backend-api/conversation #### Defined in -[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L134) +[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L134) ___ @@ -93,7 +96,7 @@ ___ #### Defined in -[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L251) 
+[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L251) ___ @@ -118,7 +121,7 @@ ___ #### Defined in -[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L257) +[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L257) ___ @@ -135,7 +138,7 @@ ___ #### Defined in -[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L270) +[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L270) ___ @@ -157,7 +160,7 @@ https://chat.openapi.com/backend-api/conversation/message_feedback #### Defined in -[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L193) +[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L193) ___ @@ -167,7 +170,7 @@ ___ #### Defined in -[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L249) +[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L249) ___ @@ -187,7 +190,7 @@ ___ #### Defined in -[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L222) +[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L222) ___ @@ -197,7 +200,7 @@ ___ #### Defined in -[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L220) +[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L220) ___ @@ -207,7 +210,7 @@ ___ #### Defined in -[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L275) +[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L275) ___ @@ -225,7 +228,7 @@ ___ #### Defined in -[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L77) 
+[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L77) ___ @@ -243,7 +246,7 @@ https://chat.openapi.com/backend-api/models #### Defined in -[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L70) +[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L70) ___ @@ -262,7 +265,7 @@ https://chat.openapi.com/backend-api/moderations #### Defined in -[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L97) +[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L97) ___ @@ -282,7 +285,7 @@ https://chat.openapi.com/backend-api/moderations #### Defined in -[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L114) +[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L114) ___ @@ -300,7 +303,7 @@ ___ #### Defined in -[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L161) +[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L161) ___ @@ -317,7 +320,7 @@ ___ #### Defined in -[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L178) +[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L178) ___ @@ -327,7 +330,37 @@ ___ #### Defined in -[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L3) +[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L3) + +___ + +### SendConversationMessageOptions + +Ƭ **SendConversationMessageOptions**: `Omit`<[`SendMessageOptions`](modules.md#sendmessageoptions), ``"conversationId"`` \| ``"parentMessageId"``\> + +#### Defined in + +[types.ts:285](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L285) + +___ + +### 
SendMessageOptions + +Ƭ **SendMessageOptions**: `Object` + +#### Type declaration + +| Name | Type | +| :------ | :------ | +| `abortSignal?` | `AbortSignal` | +| `conversationId?` | `string` | +| `onConversationResponse?` | (`response`: [`ConversationResponseEvent`](modules.md#conversationresponseevent)) => `void` | +| `onProgress?` | (`partialResponse`: `string`) => `void` | +| `parentMessageId?` | `string` | + +#### Defined in + +[types.ts:277](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L277) ___ @@ -348,7 +381,7 @@ https://chat.openapi.com/api/auth/session #### Defined in -[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L8) +[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L8) ___ @@ -370,7 +403,7 @@ ___ #### Defined in -[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L30) +[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L30) ## Functions @@ -390,4 +423,4 @@ ___ #### Defined in -[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/utils.ts#L4) +[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/utils.ts#L4) diff --git a/docs/readme.md b/docs/readme.md index bf546da..f5b28ae 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -15,6 +15,7 @@ chatgpt / [Exports](modules.md) - [Usage](#usage) - [Docs](#docs) - [How it works](#how-it-works) +- [Compatibility](#compatibility) - [Examples](#examples) - [Credit](#credit) - [License](#license) @@ -96,6 +97,19 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l > **Note** > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). 
+## Compatibility + +This package is ESM-only. It supports: + +- Node.js >= 16.8 + - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0) + - If you need CommonJS support, use [`v1.3.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.3.0) +- Edge runtimes like CF workers and Vercel edge functions +- Modern browsers + - This is mainly intended for chrome extensions where your code is protected to a degree + - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token + - If you want to build a website with `chatgpt`, we recommend using it only from your backend API + ## Examples All of these awesome projects are built using the `chatgpt` package. 🤯 @@ -103,13 +117,16 @@ All of these awesome projects are built using the `chatgpt` package. 🤯 - [Twitter Bot](https://github.com/transitive-bullshit/chatgpt-twitter-bot) powered by ChatGPT ✨ - Mention [@ChatGPTBot](https://twitter.com/ChatGPTBot) on Twitter with your prompt to try it out - [Chrome Extension](https://github.com/gragland/chatgpt-everywhere) ([demo](https://twitter.com/gabe_ragland/status/1599466486422470656)) -- [VSCode Extension](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400)) +- [VSCode Extension #1](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400)) +- [VSCode Extension #2](https://github.com/barnesoir/chatgpt-vscode-plugin) +- [Raycast Extension](https://github.com/abielzulio/chatgpt-raycast) ([demo](https://twitter.com/abielzulio/status/1600176002042191875)) - [Go Telegram Bot](https://github.com/m1guelpf/chatgpt-telegram) - [GitHub ProBot](https://github.com/oceanlvr/ChatGPTBot) - [Discord Bot](https://github.com/onury5506/Discord-ChatGPT-Bot) - [WeChat Bot](https://github.com/AutumnWhj/ChatGPT-wechat-bot) - 
[Lovelines.xyz](https://lovelines.xyz) - [EXM smart contracts](https://github.com/decentldotland/molecule) +- [Flutter ChatGPT API](https://github.com/coskuncay/flutter_chatgpt_api) If you create a cool integration, feel free to open a PR and add it to the list. From 4693de97a1e8dbea74d08b5c0473f6a13438fd17 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 18:27:55 -0600 Subject: [PATCH 5/8] docs: update readme for new release --- readme.md | 50 ++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 42 insertions(+), 8 deletions(-) diff --git a/readme.md b/readme.md index 982ef57..ff4bd76 100644 --- a/readme.md +++ b/readme.md @@ -11,8 +11,9 @@ - [Intro](#intro) - [Install](#install) - [Usage](#usage) -- [Docs](#docs) -- [How it works](#how-it-works) + - [Docs](#docs) + - [Demos](#demos) + - [Session Tokens](#session-tokens) - [Compatibility](#compatibility) - [Examples](#examples) - [Credit](#credit) @@ -37,7 +38,9 @@ import { ChatGPTAPI } from 'chatgpt' async function example() { // sessionToken is required; see below for details - const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) + const api = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN + }) // ensure the API is properly authenticated await api.ensureAuth() @@ -52,7 +55,7 @@ async function example() { } ``` -By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use: +The default ChatGPT responses are formatted as markdown. 
If you want to work with plaintext only, you can use: ```ts const api = new ChatGPTAPI({ @@ -61,6 +64,25 @@ const api = new ChatGPTAPI({ }) ``` +If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`: + +```ts +const api = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN +}) + +const conversation = api.getConversation() + +// send a message and wait for the response +const response0 = await conversation.sendMessage('What is OpenAI?') + +// send a follow-up prompt to the previous message and wait for the response +const response1 = await conversation.sendMessage('Can you expand on that?') + +// send another follow-up to the same conversation +const response2 = await conversation.sendMessage('Oh cool; thank you') +``` +
Usage in CommonJS (Dynamic import) @@ -81,7 +103,13 @@ async function example() {
-A full [demo](./src/demo.ts) is included for testing purposes: +### Docs + +See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. + +### Demos + +A [basic demo](./src/demo.ts) is included for testing purposes: ```bash # 1. clone repo @@ -91,11 +119,17 @@ A full [demo](./src/demo.ts) is included for testing purposes: npx tsx src/demo.ts ``` -## Docs +A [conversation demo](./src/demo-conversation.ts) is also included: -See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. +```bash +# 1. clone repo +# 2. install node deps +# 3. set `SESSION_TOKEN` in .env +# 4. run: +npx tsx src/demo-conversation.ts +``` -## How it works +### Session Tokens **This package requires a valid session token from ChatGPT to access it's unofficial REST API.** From 58795f41507a524e5dd52a12a421d65ef074ca44 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 22:07:14 -0600 Subject: [PATCH 6/8] feat: fixes and documenting methods --- package.json | 9 ++-- pnpm-lock.yaml | 7 +++ src/chatgpt-api.test.ts | 51 ++++++++++++++++++-- src/chatgpt-api.ts | 102 ++++++++++++++++++++++++++++++---------- src/fetch.ts | 5 +- src/types.ts | 1 + 6 files changed, 141 insertions(+), 34 deletions(-) diff --git a/package.json b/package.json index 3094656..100a49f 100644 --- a/package.json +++ b/package.json @@ -40,6 +40,7 @@ "dependencies": { "eventsource-parser": "^0.0.5", "expiry-map": "^2.0.0", + "p-timeout": "^6.0.0", "remark": "^14.0.2", "strip-markdown": "^5.0.0", "uuid": "^9.0.0" @@ -62,6 +63,9 @@ "typedoc-plugin-markdown": "^3.13.6", "typescript": "^4.9.3" }, + "optionalDependencies": { + "undici": "^5.13.0" + }, "lint-staged": { "*.{ts,tsx}": [ "prettier --write" @@ -89,8 +93,5 @@ "ai", "ml", "bot" - ], - "optionalDependencies": { - "undici": "^5.13.0" - } + ] } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4ddaac7..8bb76f7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -13,6 +13,7 @@ 
specifiers: lint-staged: ^13.0.3 npm-run-all: ^4.1.5 ora: ^6.1.2 + p-timeout: ^6.0.0 prettier: ^2.8.0 remark: ^14.0.2 strip-markdown: ^5.0.0 @@ -27,6 +28,7 @@ specifiers: dependencies: eventsource-parser: 0.0.5 expiry-map: 2.0.0 + p-timeout: 6.0.0 remark: 14.0.2 strip-markdown: 5.0.0 uuid: 9.0.0 @@ -2651,6 +2653,11 @@ packages: engines: {node: '>=12'} dev: true + /p-timeout/6.0.0: + resolution: {integrity: sha512-5iS61MOdUMemWH9CORQRxVXTp9g5K8rPnI9uQpo97aWgsH3vVXKjkIhDi+OgIDmN3Ly9+AZ2fZV01Wut1yzfKA==} + engines: {node: '>=14.16'} + dev: false + /parse-json/4.0.0: resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} engines: {node: '>=4'} diff --git a/src/chatgpt-api.test.ts b/src/chatgpt-api.test.ts index 090afba..fd4bdd6 100644 --- a/src/chatgpt-api.test.ts +++ b/src/chatgpt-api.test.ts @@ -36,12 +36,14 @@ test('ChatGPTAPI valid session token', async (t) => { await t.notThrowsAsync( (async () => { - const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) + const chatgpt = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN + }) // Don't make any real API calls using our session token if we're running on CI if (!isCI) { - await api.ensureAuth() - const response = await api.sendMessage('test') + await chatgpt.ensureAuth() + const response = await chatgpt.sendMessage('test') console.log('chatgpt response', response) t.truthy(response) @@ -68,3 +70,46 @@ if (!isCI) { ) }) } + +if (!isCI) { + test('ChatGPTAPI timeout', async (t) => { + t.timeout(30 * 1000) // 30 seconds + + await t.throwsAsync( + async () => { + const chatgpt = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN + }) + + await chatgpt.sendMessage('test', { + timeoutMs: 1 + }) + }, + { + message: 'ChatGPT timed out waiting for response' + } + ) + }) + + test('ChatGPTAPI abort', async (t) => { + t.timeout(30 * 1000) // 30 seconds + + await t.throwsAsync( + async () => { + const chatgpt = new ChatGPTAPI({ + 
sessionToken: process.env.SESSION_TOKEN + }) + + const abortController = new AbortController() + setTimeout(() => abortController.abort(new Error('testing abort')), 10) + + await chatgpt.sendMessage('test', { + abortSignal: abortController.signal + }) + }, + { + message: 'testing abort' + } + ) + }) +} diff --git a/src/chatgpt-api.ts b/src/chatgpt-api.ts index bca3515..e02c811 100644 --- a/src/chatgpt-api.ts +++ b/src/chatgpt-api.ts @@ -1,4 +1,5 @@ import ExpiryMap from 'expiry-map' +import pTimeout, { TimeoutError } from 'p-timeout' import { v4 as uuidv4 } from 'uuid' import * as types from './types' @@ -18,8 +19,9 @@ export class ChatGPTAPI { protected _backendApiBaseUrl: string protected _userAgent: string - // stores access tokens for up to 10 seconds before needing to refresh - protected _accessTokenCache = new ExpiryMap(10 * 1000) + // Stores access tokens for `accessTokenTTL` milliseconds before needing to refresh + // (defaults to 60 seconds) + protected _accessTokenCache: ExpiryMap /** * Creates a new client wrapper around the unofficial ChatGPT REST API. 
@@ -28,6 +30,7 @@ export class ChatGPTAPI { * @param apiBaseUrl - Optional override; the base URL for ChatGPT webapp's API (`/api`) * @param backendApiBaseUrl - Optional override; the base URL for the ChatGPT backend API (`/backend-api`) * @param userAgent - Optional override; the `user-agent` header to use with ChatGPT requests + * @param accessTokenTTL - Optional override; how long in milliseconds access tokens should last before being forcefully refreshed */ constructor(opts: { sessionToken: string @@ -43,13 +46,17 @@ export class ChatGPTAPI { /** @defaultValue `'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'` **/ userAgent?: string + + /** @defaultValue 60000 (60 seconds) */ + accessTokenTTL?: number }) { const { sessionToken, markdown = true, apiBaseUrl = 'https://chat.openai.com/api', backendApiBaseUrl = 'https://chat.openai.com/backend-api', - userAgent = USER_AGENT + userAgent = USER_AGENT, + accessTokenTTL = 60000 // 60 seconds } = opts this._sessionToken = sessionToken @@ -58,31 +65,26 @@ export class ChatGPTAPI { this._backendApiBaseUrl = backendApiBaseUrl this._userAgent = userAgent + this._accessTokenCache = new ExpiryMap(accessTokenTTL) + if (!this._sessionToken) { throw new Error('ChatGPT invalid session token') } } - async getIsAuthenticated() { - try { - void (await this.refreshAccessToken()) - return true - } catch (err) { - return false - } - } - - async ensureAuth() { - return await this.refreshAccessToken() - } - /** * Sends a message to ChatGPT, waits for the response to resolve, and returns * the response. * + * If you want to receive a stream of partial responses, use `opts.onProgress`. + * If you want to receive the full response, including message and conversation IDs, + * you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation` + * helper. 
+ * * @param message - The prompt message to send * @param opts.conversationId - Optional ID of a conversation to continue * @param opts.parentMessageId - Optional ID of the previous message in the conversation + * @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout) * @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated * @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response * @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) @@ -96,11 +98,19 @@ export class ChatGPTAPI { const { conversationId, parentMessageId = uuidv4(), + timeoutMs, onProgress, - onConversationResponse, - abortSignal + onConversationResponse } = opts + let { abortSignal } = opts + + let abortController: AbortController = null + if (timeoutMs && !abortSignal) { + abortController = new AbortController() + abortSignal = abortController.signal + } + const accessToken = await this.refreshAccessToken() const body: types.ConversationJSONBody = { @@ -124,14 +134,9 @@ export class ChatGPTAPI { } const url = `${this._backendApiBaseUrl}/conversation` - - // TODO: What's the best way to differentiate btwn wanting just the response text - // versus wanting the full response message, so you can extract the ID and other - // metadata? 
- // let fullResponse: types.Message = null let response = '' - return new Promise((resolve, reject) => { + const responseP = new Promise((resolve, reject) => { fetchSSE(url, { method: 'POST', headers: { @@ -164,7 +169,6 @@ export class ChatGPTAPI { } response = text - // fullResponse = message if (onProgress) { onProgress(text) @@ -178,8 +182,56 @@ export class ChatGPTAPI { } }).catch(reject) }) + + if (timeoutMs) { + if (abortController) { + // This will be called when a timeout occurs in order for us to forcibly + // ensure that the underlying HTTP request is aborted. + ;(responseP as any).cancel = () => { + abortController.abort() + } + } + + return pTimeout(responseP, { + milliseconds: timeoutMs, + message: 'ChatGPT timed out waiting for response' + }) + } else { + return responseP + } } + /** + * @returns `true` if the client has a valid acces token or `false` if refreshing + * the token fails. + */ + async getIsAuthenticated() { + try { + void (await this.refreshAccessToken()) + return true + } catch (err) { + return false + } + } + + /** + * Refreshes the client's access token which will succeed only if the session + * is still valid. + */ + async ensureAuth() { + return await this.refreshAccessToken() + } + + /** + * Attempts to refresh the current access token using the ChatGPT + * `sessionToken` cookie. + * + * Access tokens will be cached for up to `accessTokenTTL` milliseconds to + * prevent refreshing access tokens too frequently. + * + * @returns A valid access token + * @throws An error if refreshing the access token fails. 
+ */ async refreshAccessToken(): Promise { const cachedAccessToken = this._accessTokenCache.get(KEY_ACCESS_TOKEN) if (cachedAccessToken) { diff --git a/src/fetch.ts b/src/fetch.ts index 308761c..8448010 100644 --- a/src/fetch.ts +++ b/src/fetch.ts @@ -2,8 +2,9 @@ // Use `undici` for node.js 16 and 17 // Use `fetch` for node.js >= 18 -// Use `fetch` for browsers -// Use `fetch` for all other environments +// Use `fetch` for all other environments, including browsers +// NOTE: The top-level await is removed in a `postbuild` npm script for the +// browser build const fetch = globalThis.fetch ?? ((await import('undici')).fetch as unknown as typeof globalThis.fetch) diff --git a/src/types.ts b/src/types.ts index 80e7004..ae73f6c 100644 --- a/src/types.ts +++ b/src/types.ts @@ -277,6 +277,7 @@ export type MessageMetadata = any export type SendMessageOptions = { conversationId?: string parentMessageId?: string + timeoutMs?: number onProgress?: (partialResponse: string) => void onConversationResponse?: (response: ConversationResponseEvent) => void abortSignal?: AbortSignal From 92318f1e49b1fe4dd59e1fbea5721084c110ba3c Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 22:29:10 -0600 Subject: [PATCH 7/8] docs: updates --- readme.md | 46 ++++++++++++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/readme.md b/readme.md index ff4bd76..3f8d795 100644 --- a/readme.md +++ b/readme.md @@ -14,9 +14,9 @@ - [Docs](#docs) - [Demos](#demos) - [Session Tokens](#session-tokens) +- [Projects](#projects) - [Compatibility](#compatibility) -- [Examples](#examples) -- [Credit](#credit) +- [Credits](#credits) - [License](#license) ## Intro @@ -83,6 +83,17 @@ const response1 = await conversation.sendMessage('Can you expand on that?') const response2 = await conversation.sendMessage('Oh cool; thank you') ``` +Sometimes, ChatGPT will hang for an extended period of time before sending it's response. 
This may be due to rate limiting or it may be due to OpenAI's servers being overloaded.
+
+To mitigate these issues, you can add a timeout like this:
+
+```ts
+// timeout after 2 minutes (which will also abort the underlying HTTP request)
+const response = await api.sendMessage('this is a timeout test', {
+  timeoutMs: 2 * 60 * 1000
+})
+```
+
Usage in CommonJS (Dynamic import) @@ -149,20 +160,7 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l > **Note** > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). -## Compatibility - -This package is ESM-only. It supports: - -- Node.js >= 16.8 - - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0) - - If you need CommonJS support, use [`v1.3.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.3.0) -- Edge runtimes like CF workers and Vercel edge functions -- Modern browsers - - This is mainly intended for chrome extensions where your code is protected to a degree - - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token - - If you want to build a website with `chatgpt`, we recommend using it only from your backend API - -## Examples +## Projects All of these awesome projects are built using the `chatgpt` package. 🤯 @@ -183,7 +181,19 @@ All of these awesome projects are built using the `chatgpt` package. 🤯 If you create a cool integration, feel free to open a PR and add it to the list. -## Credit +## Compatibility + +This package is ESM-only. 
It supports: + +- Node.js >= 16.8 + - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0) +- Edge runtimes like CF workers and Vercel edge functions +- Modern browsers + - Mainly chrome extensions where your code is protected to a degree + - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token + - If you want to build a website using `chatgpt`, we recommend using it only from your backend API + +## Credits - Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪 - The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross) @@ -193,4 +203,4 @@ If you create a cool integration, feel free to open a PR and add it to the list. MIT © [Travis Fischer](https://transitivebullsh.it) -If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter twitter +If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter twitter From f723ae5c4d3a979565cf2eefeeeec9aaad37beec Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 6 Dec 2022 22:37:56 -0600 Subject: [PATCH 8/8] chore: update auto-generated docs --- docs/classes/ChatGPTAPI.md | 36 +++++++-- docs/classes/ChatGPTConversation.md | 10 +-- docs/modules.md | 47 +++++------ docs/readme.md | 119 +++++++++++++++++++++------- 4 files changed, 151 insertions(+), 61 deletions(-) diff --git a/docs/classes/ChatGPTAPI.md b/docs/classes/ChatGPTAPI.md index 61d3b44..e48818f 100644 --- a/docs/classes/ChatGPTAPI.md +++ b/docs/classes/ChatGPTAPI.md @@ -29,6 +29,7 @@ Creates a new client wrapper around the unofficial ChatGPT 
REST API. | Name | Type | Description | | :------ | :------ | :------ | | `opts` | `Object` | - | +| `opts.accessTokenTTL?` | `number` | **`Default Value`** 60000 (60 seconds) | | `opts.apiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/api'` * | | `opts.backendApiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/backend-api'` * | | `opts.markdown?` | `boolean` | **`Default Value`** `true` * | @@ -37,7 +38,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API. #### Defined in -[chatgpt-api.ts:32](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L32) +[chatgpt-api.ts:35](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L35) ## Methods @@ -45,13 +46,16 @@ Creates a new client wrapper around the unofficial ChatGPT REST API. ▸ **ensureAuth**(): `Promise`<`string`\> +Refreshes the client's access token which will succeed only if the session +is still valid. + #### Returns `Promise`<`string`\> #### Defined in -[chatgpt-api.ts:75](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L75) +[chatgpt-api.ts:221](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L221) ___ @@ -78,7 +82,7 @@ The new conversation instance #### Defined in -[chatgpt-api.ts:233](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L233) +[chatgpt-api.ts:285](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L285) ___ @@ -90,9 +94,12 @@ ___ `Promise`<`boolean`\> +`true` if the client has a valid acces token or `false` if refreshing +the token fails. 
+ #### Defined in -[chatgpt-api.ts:66](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L66) +[chatgpt-api.ts:208](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L208) ___ @@ -100,13 +107,25 @@ ___ ▸ **refreshAccessToken**(): `Promise`<`string`\> +Attempts to refresh the current access token using the ChatGPT +`sessionToken` cookie. + +Access tokens will be cached for up to `accessTokenTTL` milliseconds to +prevent refreshing access tokens too frequently. + +**`Throws`** + +An error if refreshing the access token fails. + #### Returns `Promise`<`string`\> +A valid access token + #### Defined in -[chatgpt-api.ts:183](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L183) +[chatgpt-api.ts:235](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L235) ___ @@ -117,6 +136,11 @@ ___ Sends a message to ChatGPT, waits for the response to resolve, and returns the response. +If you want to receive a stream of partial responses, use `opts.onProgress`. +If you want to receive the full response, including message and conversation IDs, +you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation` +helper. + #### Parameters | Name | Type | Description | @@ -132,4 +156,4 @@ The response from ChatGPT #### Defined in -[chatgpt-api.ts:92](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-api.ts#L92) +[chatgpt-api.ts:94](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L94) diff --git a/docs/classes/ChatGPTConversation.md b/docs/classes/ChatGPTConversation.md index 5c7b81b..1407648 100644 --- a/docs/classes/ChatGPTConversation.md +++ b/docs/classes/ChatGPTConversation.md @@ -41,7 +41,7 @@ Creates a new conversation wrapper around the ChatGPT API. 
#### Defined in -[chatgpt-conversation.ts:21](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L21) +[chatgpt-conversation.ts:21](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L21) ## Properties @@ -51,7 +51,7 @@ Creates a new conversation wrapper around the ChatGPT API. #### Defined in -[chatgpt-conversation.ts:10](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L10) +[chatgpt-conversation.ts:10](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L10) ___ @@ -61,7 +61,7 @@ ___ #### Defined in -[chatgpt-conversation.ts:11](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L11) +[chatgpt-conversation.ts:11](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L11) ___ @@ -71,7 +71,7 @@ ___ #### Defined in -[chatgpt-conversation.ts:12](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L12) +[chatgpt-conversation.ts:12](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L12) ## Methods @@ -104,4 +104,4 @@ The response from ChatGPT #### Defined in -[chatgpt-conversation.ts:48](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/chatgpt-conversation.ts#L48) +[chatgpt-conversation.ts:48](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L48) diff --git a/docs/modules.md b/docs/modules.md index c9c4e03..5a50b73 100644 --- a/docs/modules.md +++ b/docs/modules.md @@ -46,7 +46,7 @@ #### Defined in -[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L109) +[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L109) ___ @@ -56,7 +56,7 @@ ___ #### Defined in 
-[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L1) +[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L1) ___ @@ -78,7 +78,7 @@ https://chat.openapi.com/backend-api/conversation #### Defined in -[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L134) +[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L134) ___ @@ -96,7 +96,7 @@ ___ #### Defined in -[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L251) +[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L251) ___ @@ -121,7 +121,7 @@ ___ #### Defined in -[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L257) +[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L257) ___ @@ -138,7 +138,7 @@ ___ #### Defined in -[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L270) +[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L270) ___ @@ -160,7 +160,7 @@ https://chat.openapi.com/backend-api/conversation/message_feedback #### Defined in -[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L193) +[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L193) ___ @@ -170,7 +170,7 @@ ___ #### Defined in -[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L249) +[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L249) ___ @@ -190,7 +190,7 @@ ___ #### Defined in -[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L222) +[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L222) ___ @@ 
-200,7 +200,7 @@ ___ #### Defined in -[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L220) +[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L220) ___ @@ -210,7 +210,7 @@ ___ #### Defined in -[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L275) +[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L275) ___ @@ -228,7 +228,7 @@ ___ #### Defined in -[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L77) +[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L77) ___ @@ -246,7 +246,7 @@ https://chat.openapi.com/backend-api/models #### Defined in -[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L70) +[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L70) ___ @@ -265,7 +265,7 @@ https://chat.openapi.com/backend-api/moderations #### Defined in -[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L97) +[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L97) ___ @@ -285,7 +285,7 @@ https://chat.openapi.com/backend-api/moderations #### Defined in -[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L114) +[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L114) ___ @@ -303,7 +303,7 @@ ___ #### Defined in -[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L161) +[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L161) ___ @@ -320,7 +320,7 @@ ___ #### Defined in -[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L178) 
+[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L178) ___ @@ -330,7 +330,7 @@ ___ #### Defined in -[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L3) +[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L3) ___ @@ -340,7 +340,7 @@ ___ #### Defined in -[types.ts:285](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L285) +[types.ts:286](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L286) ___ @@ -357,10 +357,11 @@ ___ | `onConversationResponse?` | (`response`: [`ConversationResponseEvent`](modules.md#conversationresponseevent)) => `void` | | `onProgress?` | (`partialResponse`: `string`) => `void` | | `parentMessageId?` | `string` | +| `timeoutMs?` | `number` | #### Defined in -[types.ts:277](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L277) +[types.ts:277](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L277) ___ @@ -381,7 +382,7 @@ https://chat.openapi.com/api/auth/session #### Defined in -[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L8) +[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L8) ___ @@ -403,7 +404,7 @@ ___ #### Defined in -[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/types.ts#L30) +[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L30) ## Functions @@ -423,4 +424,4 @@ ___ #### Defined in -[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/8e045b2/src/utils.ts#L4) +[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/utils.ts#L4) diff --git a/docs/readme.md b/docs/readme.md index f5b28ae..1be5668 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -13,11 +13,12 @@ chatgpt / [Exports](modules.md) - 
[Intro](#intro) - [Install](#install) - [Usage](#usage) -- [Docs](#docs) -- [How it works](#how-it-works) + - [Docs](#docs) + - [Demos](#demos) + - [Session Tokens](#session-tokens) +- [Projects](#projects) - [Compatibility](#compatibility) -- [Examples](#examples) -- [Credit](#credit) +- [Credits](#credits) - [License](#license) ## Intro @@ -39,7 +40,9 @@ import { ChatGPTAPI } from 'chatgpt' async function example() { // sessionToken is required; see below for details - const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) + const api = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN + }) // ensure the API is properly authenticated await api.ensureAuth() @@ -54,7 +57,7 @@ async function example() { } ``` -By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use: +The default ChatGPT responses are formatted as markdown. If you want to work with plaintext only, you can use: ```ts const api = new ChatGPTAPI({ @@ -63,7 +66,63 @@ const api = new ChatGPTAPI({ }) ``` -A full [demo](./src/demo.ts) is included for testing purposes: +If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`: + +```ts +const api = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN +}) + +const conversation = api.getConversation() + +// send a message and wait for the response +const response0 = await conversation.sendMessage('What is OpenAI?') + +// send a follow-up prompt to the previous message and wait for the response +const response1 = await conversation.sendMessage('Can you expand on that?') + +// send another follow-up to the same conversation +const response2 = await conversation.sendMessage('Oh cool; thank you') +``` + +Sometimes, ChatGPT will hang for an extended period of time before sending it's response. This may be due to rate limiting or it may be due to OpenAI's servers being overloaded. 
+
+To mitigate these issues, you can add a timeout like this:
+
+```ts
+// timeout after 2 minutes (which will also abort the underlying HTTP request)
+const response = await api.sendMessage('this is a timeout test', {
+  timeoutMs: 2 * 60 * 1000
+})
+```
+
+
+Usage in CommonJS (Dynamic import) + +```js +async function example() { + // To use ESM in CommonJS, you can use a dynamic import + const { ChatGPTAPI } = await import('chatgpt') + + const api = new ChatGPTAPI({ + sessionToken: process.env.SESSION_TOKEN + }) + await api.ensureAuth() + + const response = await api.sendMessage('Hello World!') + console.log(response) +} +``` + +
+ +### Docs + +See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. + +### Demos + +A [basic demo](./src/demo.ts) is included for testing purposes: ```bash # 1. clone repo @@ -73,11 +132,17 @@ A full [demo](./src/demo.ts) is included for testing purposes: npx tsx src/demo.ts ``` -## Docs +A [conversation demo](./src/demo-conversation.ts) is also included: -See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. +```bash +# 1. clone repo +# 2. install node deps +# 3. set `SESSION_TOKEN` in .env +# 4. run: +npx tsx src/demo-conversation.ts +``` -## How it works +### Session Tokens **This package requires a valid session token from ChatGPT to access it's unofficial REST API.** @@ -97,20 +162,7 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l > **Note** > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). -## Compatibility - -This package is ESM-only. It supports: - -- Node.js >= 16.8 - - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0) - - If you need CommonJS support, use [`v1.3.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.3.0) -- Edge runtimes like CF workers and Vercel edge functions -- Modern browsers - - This is mainly intended for chrome extensions where your code is protected to a degree - - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token - - If you want to build a website with `chatgpt`, we recommend using it only from your backend API - -## Examples +## Projects All of these awesome projects are built using the `chatgpt` package. 
🤯 @@ -118,7 +170,8 @@ All of these awesome projects are built using the `chatgpt` package. 🤯 - Mention [@ChatGPTBot](https://twitter.com/ChatGPTBot) on Twitter with your prompt to try it out - [Chrome Extension](https://github.com/gragland/chatgpt-everywhere) ([demo](https://twitter.com/gabe_ragland/status/1599466486422470656)) - [VSCode Extension #1](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400)) -- [VSCode Extension #2](https://github.com/barnesoir/chatgpt-vscode-plugin) +- [VSCode Extension #2](https://github.com/barnesoir/chatgpt-vscode-plugin) ([marketplace](https://marketplace.visualstudio.com/items?itemName=JayBarnes.chatgpt-vscode-plugin)) +- [VSCode Extension #3](https://github.com/gencay/vscode-chatgpt) ([marketplace](https://marketplace.visualstudio.com/items?itemName=gencay.vscode-chatgpt)) - [Raycast Extension](https://github.com/abielzulio/chatgpt-raycast) ([demo](https://twitter.com/abielzulio/status/1600176002042191875)) - [Go Telegram Bot](https://github.com/m1guelpf/chatgpt-telegram) - [GitHub ProBot](https://github.com/oceanlvr/ChatGPTBot) @@ -130,7 +183,19 @@ All of these awesome projects are built using the `chatgpt` package. 🤯 If you create a cool integration, feel free to open a PR and add it to the list. -## Credit +## Compatibility + +This package is ESM-only. 
It supports:
+
+- Node.js >= 16.8
+  - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0)
+- Edge runtimes like CF workers and Vercel edge functions
+- Modern browsers
+  - Mainly chrome extensions where your code is protected to a degree
+  - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token
+  - If you want to build a website using `chatgpt`, we recommend using it only from your backend API
+
+## Credits

- Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪
- The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross)
@@ -140,4 +205,4 @@ If you create a cool integration, feel free to open a PR and add it to the list.

MIT © [Travis Fischer](https://transitivebullsh.it)

-If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter twitter
+If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter