diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 168526b..73af79e 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -12,7 +12,6 @@ jobs:
node-version:
- 18
- 16
- - 14
steps:
- name: Checkout
diff --git a/docs/classes/ChatGPTAPI.md b/docs/classes/ChatGPTAPI.md
index 35c6b7a..e48818f 100644
--- a/docs/classes/ChatGPTAPI.md
+++ b/docs/classes/ChatGPTAPI.md
@@ -11,6 +11,7 @@
### Methods
- [ensureAuth](ChatGPTAPI.md#ensureauth)
+- [getConversation](ChatGPTAPI.md#getconversation)
- [getIsAuthenticated](ChatGPTAPI.md#getisauthenticated)
- [refreshAccessToken](ChatGPTAPI.md#refreshaccesstoken)
- [sendMessage](ChatGPTAPI.md#sendmessage)
@@ -28,6 +29,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
| Name | Type | Description |
| :------ | :------ | :------ |
| `opts` | `Object` | - |
+| `opts.accessTokenTTL?` | `number` | **`Default Value`** 60000 (60 seconds) |
| `opts.apiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/api'` * |
| `opts.backendApiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/backend-api'` * |
| `opts.markdown?` | `boolean` | **`Default Value`** `true` * |
@@ -36,7 +38,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
#### Defined in
-[chatgpt-api.ts:31](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L31)
+[chatgpt-api.ts:35](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L35)
## Methods
@@ -44,13 +46,43 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
▸ **ensureAuth**(): `Promise`<`string`\>
+Refreshes the client's access token which will succeed only if the session
+is still valid.
+
#### Returns
`Promise`<`string`\>
#### Defined in
-[chatgpt-api.ts:74](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L74)
+[chatgpt-api.ts:221](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L221)
+
+___
+
+### getConversation
+
+▸ **getConversation**(`opts?`): [`ChatGPTConversation`](ChatGPTConversation.md)
+
+Gets a new ChatGPTConversation instance, which can be used to send multiple
+messages as part of a single conversation.
+
+#### Parameters
+
+| Name | Type | Description |
+| :------ | :------ | :------ |
+| `opts` | `Object` | - |
+| `opts.conversationId?` | `string` | Optional ID of the previous message in a conversation |
+| `opts.parentMessageId?` | `string` | Optional ID of the previous message in a conversation |
+
+#### Returns
+
+[`ChatGPTConversation`](ChatGPTConversation.md)
+
+The new conversation instance
+
+#### Defined in
+
+[chatgpt-api.ts:285](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L285)
___
@@ -62,9 +94,12 @@ ___
`Promise`<`boolean`\>
+`true` if the client has a valid access token or `false` if refreshing
+the token fails.
+
#### Defined in
-[chatgpt-api.ts:65](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L65)
+[chatgpt-api.ts:208](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L208)
___
@@ -72,13 +107,25 @@ ___
▸ **refreshAccessToken**(): `Promise`<`string`\>
+Attempts to refresh the current access token using the ChatGPT
+`sessionToken` cookie.
+
+Access tokens will be cached for up to `accessTokenTTL` milliseconds to
+prevent refreshing access tokens too frequently.
+
+**`Throws`**
+
+An error if refreshing the access token fails.
+
#### Returns
`Promise`<`string`\>
+A valid access token
+
#### Defined in
-[chatgpt-api.ts:165](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L165)
+[chatgpt-api.ts:235](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L235)
___
@@ -89,19 +136,24 @@ ___
Sends a message to ChatGPT, waits for the response to resolve, and returns
the response.
+If you want to receive a stream of partial responses, use `opts.onProgress`.
+If you want to receive the full response, including message and conversation IDs,
+you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation`
+helper.
+
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
-| `message` | `string` | The plaintext message to send. |
-| `opts` | `Object` | - |
-| `opts.conversationId?` | `string` | Optional ID of the previous message in a conversation |
-| `opts.onProgress?` | (`partialResponse`: `string`) => `void` | - |
+| `message` | `string` | The prompt message to send |
+| `opts` | [`SendMessageOptions`](../modules.md#sendmessageoptions) | - |
#### Returns
`Promise`<`string`\>
+The response from ChatGPT
+
#### Defined in
-[chatgpt-api.ts:86](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L86)
+[chatgpt-api.ts:94](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L94)
diff --git a/docs/classes/ChatGPTConversation.md b/docs/classes/ChatGPTConversation.md
new file mode 100644
index 0000000..1407648
--- /dev/null
+++ b/docs/classes/ChatGPTConversation.md
@@ -0,0 +1,107 @@
+[chatgpt](../readme.md) / [Exports](../modules.md) / ChatGPTConversation
+
+# Class: ChatGPTConversation
+
+A conversation wrapper around the ChatGPTAPI. This allows you to send
+multiple messages to ChatGPT and receive responses, without having to
+manually pass the conversation ID and parent message ID for each message.
+
+## Table of contents
+
+### Constructors
+
+- [constructor](ChatGPTConversation.md#constructor)
+
+### Properties
+
+- [api](ChatGPTConversation.md#api)
+- [conversationId](ChatGPTConversation.md#conversationid)
+- [parentMessageId](ChatGPTConversation.md#parentmessageid)
+
+### Methods
+
+- [sendMessage](ChatGPTConversation.md#sendmessage)
+
+## Constructors
+
+### constructor
+
+• **new ChatGPTConversation**(`api`, `opts?`)
+
+Creates a new conversation wrapper around the ChatGPT API.
+
+#### Parameters
+
+| Name | Type | Description |
+| :------ | :------ | :------ |
+| `api` | [`ChatGPTAPI`](ChatGPTAPI.md) | The ChatGPT API instance to use |
+| `opts` | `Object` | - |
+| `opts.conversationId?` | `string` | Optional ID of a conversation to continue |
+| `opts.parentMessageId?` | `string` | Optional ID of the previous message in the conversation |
+
+#### Defined in
+
+[chatgpt-conversation.ts:21](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L21)
+
+## Properties
+
+### api
+
+• **api**: [`ChatGPTAPI`](ChatGPTAPI.md)
+
+#### Defined in
+
+[chatgpt-conversation.ts:10](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L10)
+
+___
+
+### conversationId
+
+• **conversationId**: `string` = `undefined`
+
+#### Defined in
+
+[chatgpt-conversation.ts:11](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L11)
+
+___
+
+### parentMessageId
+
+• **parentMessageId**: `string` = `undefined`
+
+#### Defined in
+
+[chatgpt-conversation.ts:12](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L12)
+
+## Methods
+
+### sendMessage
+
+▸ **sendMessage**(`message`, `opts?`): `Promise`<`string`\>
+
+Sends a message to ChatGPT, waits for the response to resolve, and returns
+the response.
+
+If this is the first message in the conversation, the conversation ID and
+parent message ID will be automatically set.
+
+This allows you to send multiple messages to ChatGPT and receive responses,
+without having to manually pass the conversation ID and parent message ID
+for each message.
+
+#### Parameters
+
+| Name | Type | Description |
+| :------ | :------ | :------ |
+| `message` | `string` | The prompt message to send |
+| `opts` | [`SendConversationMessageOptions`](../modules.md#sendconversationmessageoptions) | - |
+
+#### Returns
+
+`Promise`<`string`\>
+
+The response from ChatGPT
+
+#### Defined in
+
+[chatgpt-conversation.ts:48](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L48)
diff --git a/docs/modules.md b/docs/modules.md
index 9b0f344..5a50b73 100644
--- a/docs/modules.md
+++ b/docs/modules.md
@@ -7,6 +7,7 @@
### Classes
- [ChatGPTAPI](classes/ChatGPTAPI.md)
+- [ChatGPTConversation](classes/ChatGPTConversation.md)
### Type Aliases
@@ -28,6 +29,8 @@
- [Prompt](modules.md#prompt)
- [PromptContent](modules.md#promptcontent)
- [Role](modules.md#role)
+- [SendConversationMessageOptions](modules.md#sendconversationmessageoptions)
+- [SendMessageOptions](modules.md#sendmessageoptions)
- [SessionResult](modules.md#sessionresult)
- [User](modules.md#user)
@@ -43,7 +46,7 @@
#### Defined in
-[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L109)
+[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L109)
___
@@ -53,7 +56,7 @@ ___
#### Defined in
-[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L1)
+[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L1)
___
@@ -75,7 +78,7 @@ https://chat.openapi.com/backend-api/conversation
#### Defined in
-[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L134)
+[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L134)
___
@@ -93,7 +96,7 @@ ___
#### Defined in
-[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L251)
+[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L251)
___
@@ -118,7 +121,7 @@ ___
#### Defined in
-[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L257)
+[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L257)
___
@@ -135,7 +138,7 @@ ___
#### Defined in
-[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L270)
+[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L270)
___
@@ -157,7 +160,7 @@ https://chat.openapi.com/backend-api/conversation/message_feedback
#### Defined in
-[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L193)
+[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L193)
___
@@ -167,7 +170,7 @@ ___
#### Defined in
-[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L249)
+[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L249)
___
@@ -187,7 +190,7 @@ ___
#### Defined in
-[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L222)
+[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L222)
___
@@ -197,7 +200,7 @@ ___
#### Defined in
-[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L220)
+[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L220)
___
@@ -207,7 +210,7 @@ ___
#### Defined in
-[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L275)
+[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L275)
___
@@ -225,7 +228,7 @@ ___
#### Defined in
-[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L77)
+[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L77)
___
@@ -243,7 +246,7 @@ https://chat.openapi.com/backend-api/models
#### Defined in
-[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L70)
+[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L70)
___
@@ -262,7 +265,7 @@ https://chat.openapi.com/backend-api/moderations
#### Defined in
-[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L97)
+[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L97)
___
@@ -282,7 +285,7 @@ https://chat.openapi.com/backend-api/moderations
#### Defined in
-[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L114)
+[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L114)
___
@@ -300,7 +303,7 @@ ___
#### Defined in
-[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L161)
+[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L161)
___
@@ -317,7 +320,7 @@ ___
#### Defined in
-[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L178)
+[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L178)
___
@@ -327,7 +330,38 @@ ___
#### Defined in
-[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L3)
+[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L3)
+
+___
+
+### SendConversationMessageOptions
+
+Ƭ **SendConversationMessageOptions**: `Omit`<[`SendMessageOptions`](modules.md#sendmessageoptions), ``"conversationId"`` \| ``"parentMessageId"``\>
+
+#### Defined in
+
+[types.ts:286](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L286)
+
+___
+
+### SendMessageOptions
+
+Ƭ **SendMessageOptions**: `Object`
+
+#### Type declaration
+
+| Name | Type |
+| :------ | :------ |
+| `abortSignal?` | `AbortSignal` |
+| `conversationId?` | `string` |
+| `onConversationResponse?` | (`response`: [`ConversationResponseEvent`](modules.md#conversationresponseevent)) => `void` |
+| `onProgress?` | (`partialResponse`: `string`) => `void` |
+| `parentMessageId?` | `string` |
+| `timeoutMs?` | `number` |
+
+#### Defined in
+
+[types.ts:277](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L277)
___
@@ -348,7 +382,7 @@ https://chat.openapi.com/api/auth/session
#### Defined in
-[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L8)
+[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L8)
___
@@ -370,7 +404,7 @@ ___
#### Defined in
-[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L30)
+[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L30)
## Functions
@@ -390,4 +424,4 @@ ___
#### Defined in
-[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/utils.ts#L4)
+[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/utils.ts#L4)
diff --git a/docs/readme.md b/docs/readme.md
index bf546da..1be5668 100644
--- a/docs/readme.md
+++ b/docs/readme.md
@@ -13,10 +13,12 @@ chatgpt / [Exports](modules.md)
- [Intro](#intro)
- [Install](#install)
- [Usage](#usage)
-- [Docs](#docs)
-- [How it works](#how-it-works)
-- [Examples](#examples)
-- [Credit](#credit)
+ - [Docs](#docs)
+ - [Demos](#demos)
+ - [Session Tokens](#session-tokens)
+- [Projects](#projects)
+- [Compatibility](#compatibility)
+- [Credits](#credits)
- [License](#license)
## Intro
@@ -38,7 +40,9 @@ import { ChatGPTAPI } from 'chatgpt'
async function example() {
// sessionToken is required; see below for details
- const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN })
+ const api = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
// ensure the API is properly authenticated
await api.ensureAuth()
@@ -53,7 +57,7 @@ async function example() {
}
```
-By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use:
+The default ChatGPT responses are formatted as markdown. If you want to work with plaintext only, you can use:
```ts
const api = new ChatGPTAPI({
@@ -62,7 +66,63 @@ const api = new ChatGPTAPI({
})
```
-A full [demo](./src/demo.ts) is included for testing purposes:
+If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`:
+
+```ts
+const api = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+})
+
+const conversation = api.getConversation()
+
+// send a message and wait for the response
+const response0 = await conversation.sendMessage('What is OpenAI?')
+
+// send a follow-up prompt to the previous message and wait for the response
+const response1 = await conversation.sendMessage('Can you expand on that?')
+
+// send another follow-up to the same conversation
+const response2 = await conversation.sendMessage('Oh cool; thank you')
+```
+
+Sometimes, ChatGPT will hang for an extended period of time before sending its response. This may be due to rate limiting or it may be due to OpenAI's servers being overloaded.
+
+To mitigate these issues, you can add a timeout like this:
+
+```ts
+// timeout after 2 minutes (which will also abort the underlying HTTP request)
+const response = await api.sendMessage('this is a timeout test', {
+ timeoutMs: 2 * 60 * 1000
+})
+```
+
+
+Usage in CommonJS (Dynamic import)
+
+```js
+async function example() {
+ // To use ESM in CommonJS, you can use a dynamic import
+ const { ChatGPTAPI } = await import('chatgpt')
+
+ const api = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
+ await api.ensureAuth()
+
+ const response = await api.sendMessage('Hello World!')
+ console.log(response)
+}
+```
+
+
+
+### Docs
+
+See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
+
+### Demos
+
+A [basic demo](./src/demo.ts) is included for testing purposes:
```bash
# 1. clone repo
@@ -72,11 +132,17 @@ A full [demo](./src/demo.ts) is included for testing purposes:
npx tsx src/demo.ts
```
-## Docs
+A [conversation demo](./src/demo-conversation.ts) is also included:
-See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
+```bash
+# 1. clone repo
+# 2. install node deps
+# 3. set `SESSION_TOKEN` in .env
+# 4. run:
+npx tsx src/demo-conversation.ts
+```
-## How it works
+### Session Tokens
**This package requires a valid session token from ChatGPT to access it's unofficial REST API.**
@@ -96,24 +162,40 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l
> **Note**
> Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2).
-## Examples
+## Projects
All of these awesome projects are built using the `chatgpt` package. 🤯
- [Twitter Bot](https://github.com/transitive-bullshit/chatgpt-twitter-bot) powered by ChatGPT ✨
- Mention [@ChatGPTBot](https://twitter.com/ChatGPTBot) on Twitter with your prompt to try it out
- [Chrome Extension](https://github.com/gragland/chatgpt-everywhere) ([demo](https://twitter.com/gabe_ragland/status/1599466486422470656))
-- [VSCode Extension](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400))
+- [VSCode Extension #1](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400))
+- [VSCode Extension #2](https://github.com/barnesoir/chatgpt-vscode-plugin) ([marketplace](https://marketplace.visualstudio.com/items?itemName=JayBarnes.chatgpt-vscode-plugin))
+- [VSCode Extension #3](https://github.com/gencay/vscode-chatgpt) ([marketplace](https://marketplace.visualstudio.com/items?itemName=gencay.vscode-chatgpt))
+- [Raycast Extension](https://github.com/abielzulio/chatgpt-raycast) ([demo](https://twitter.com/abielzulio/status/1600176002042191875))
- [Go Telegram Bot](https://github.com/m1guelpf/chatgpt-telegram)
- [GitHub ProBot](https://github.com/oceanlvr/ChatGPTBot)
- [Discord Bot](https://github.com/onury5506/Discord-ChatGPT-Bot)
- [WeChat Bot](https://github.com/AutumnWhj/ChatGPT-wechat-bot)
- [Lovelines.xyz](https://lovelines.xyz)
- [EXM smart contracts](https://github.com/decentldotland/molecule)
+- [Flutter ChatGPT API](https://github.com/coskuncay/flutter_chatgpt_api)
If you create a cool integration, feel free to open a PR and add it to the list.
-## Credit
+## Compatibility
+
+This package is ESM-only. It supports:
+
+- Node.js >= 16.8
+ - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0)
+- Edge runtimes like CF workers and Vercel edge functions
+- Modern browsers
+ - Mainly chrome extensions where your code is protected to a degree
+ - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token
+ - If you want to build a website using `chatgpt`, we recommend using it only from your backend API
+
+## Credits
- Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪
- The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross)
@@ -123,4 +205,4 @@ If you create a cool integration, feel free to open a PR and add it to the list.
MIT © [Travis Fischer](https://transitivebullsh.it)
-If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter
+If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter
diff --git a/package.json b/package.json
index aac5024..100a49f 100644
--- a/package.json
+++ b/package.json
@@ -10,6 +10,7 @@
"types": "./build/index.d.ts",
"exports": {
".": {
+ "browser": "./build/browser/index.js",
"import": "./build/index.js",
"types": "./build/index.d.ts",
"default": "./build/index.js"
@@ -19,13 +20,14 @@
"build"
],
"engines": {
- "node": ">=14"
+ "node": ">=16.8"
},
"scripts": {
"build": "tsup",
"dev": "tsup --watch",
"clean": "del build",
"prebuild": "run-s clean",
+    "postbuild": "[ -z \"$CI\" ] && sed -i '' 's/ *\\?\\? *(await import(\"undici\")).fetch//' build/browser/index.js || echo 'skipping postbuild on CI'",
"predev": "run-s clean",
"pretest": "run-s build",
"docs": "typedoc",
@@ -38,7 +40,7 @@
"dependencies": {
"eventsource-parser": "^0.0.5",
"expiry-map": "^2.0.0",
- "node-fetch": "2",
+ "p-timeout": "^6.0.0",
"remark": "^14.0.2",
"strip-markdown": "^5.0.0",
"uuid": "^9.0.0"
@@ -46,7 +48,6 @@
"devDependencies": {
"@trivago/prettier-plugin-sort-imports": "^4.0.0",
"@types/node": "^18.11.9",
- "@types/node-fetch": "2",
"@types/uuid": "^9.0.0",
"ava": "^5.1.0",
"del-cli": "^5.0.0",
@@ -62,6 +63,9 @@
"typedoc-plugin-markdown": "^3.13.6",
"typescript": "^4.9.3"
},
+ "optionalDependencies": {
+ "undici": "^5.13.0"
+ },
"lint-staged": {
"*.{ts,tsx}": [
"prettier --write"
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 24fc4dc..8bb76f7 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -3,7 +3,6 @@ lockfileVersion: 5.4
specifiers:
'@trivago/prettier-plugin-sort-imports': ^4.0.0
'@types/node': ^18.11.9
- '@types/node-fetch': '2'
'@types/uuid': ^9.0.0
ava: ^5.1.0
del-cli: ^5.0.0
@@ -12,9 +11,9 @@ specifiers:
expiry-map: ^2.0.0
husky: ^8.0.2
lint-staged: ^13.0.3
- node-fetch: '2'
npm-run-all: ^4.1.5
ora: ^6.1.2
+ p-timeout: ^6.0.0
prettier: ^2.8.0
remark: ^14.0.2
strip-markdown: ^5.0.0
@@ -23,20 +22,23 @@ specifiers:
typedoc: ^0.23.21
typedoc-plugin-markdown: ^3.13.6
typescript: ^4.9.3
+ undici: ^5.13.0
uuid: ^9.0.0
dependencies:
eventsource-parser: 0.0.5
expiry-map: 2.0.0
- node-fetch: 2.6.7
+ p-timeout: 6.0.0
remark: 14.0.2
strip-markdown: 5.0.0
uuid: 9.0.0
+optionalDependencies:
+ undici: 5.13.0
+
devDependencies:
'@trivago/prettier-plugin-sort-imports': 4.0.0_prettier@2.8.0
'@types/node': 18.11.10
- '@types/node-fetch': 2.6.2
'@types/uuid': 9.0.0
ava: 5.1.0
del-cli: 5.0.0
@@ -434,13 +436,6 @@ packages:
resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==}
dev: false
- /@types/node-fetch/2.6.2:
- resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==}
- dependencies:
- '@types/node': 18.11.10
- form-data: 3.0.1
- dev: true
-
/@types/node/18.11.10:
resolution: {integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==}
dev: true
@@ -568,10 +563,6 @@ packages:
engines: {node: '>=8'}
dev: true
- /asynckit/0.4.0:
- resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
- dev: true
-
/ava/5.1.0:
resolution: {integrity: sha512-e5VFrSQ0WBPyZJWRXVrO7RFOizFeNM0t2PORwrPvWtApgkORI6cvGnY3GX1G+lzpd0HjqNx5Jus22AhxVnUMNA==}
engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'}
@@ -712,6 +703,14 @@ packages:
load-tsconfig: 0.2.3
dev: true
+ /busboy/1.6.0:
+ resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==}
+ engines: {node: '>=10.16.0'}
+ dependencies:
+ streamsearch: 1.1.0
+ dev: false
+ optional: true
+
/cac/6.7.14:
resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==}
engines: {node: '>=8'}
@@ -899,13 +898,6 @@ packages:
resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==}
dev: true
- /combined-stream/1.0.8:
- resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
- engines: {node: '>= 0.8'}
- dependencies:
- delayed-stream: 1.0.0
- dev: true
-
/commander/4.1.1:
resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
engines: {node: '>= 6'}
@@ -1053,11 +1045,6 @@ packages:
slash: 4.0.0
dev: true
- /delayed-stream/1.0.0:
- resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
- engines: {node: '>=0.4.0'}
- dev: true
-
/dequal/2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'}
@@ -1491,15 +1478,6 @@ packages:
path-exists: 5.0.0
dev: true
- /form-data/3.0.1:
- resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==}
- engines: {node: '>= 6'}
- dependencies:
- asynckit: 0.4.0
- combined-stream: 1.0.8
- mime-types: 2.1.35
- dev: true
-
/fs.realpath/1.0.0:
resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
dev: true
@@ -2426,18 +2404,6 @@ packages:
picomatch: 2.3.1
dev: true
- /mime-db/1.52.0:
- resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
- engines: {node: '>= 0.6'}
- dev: true
-
- /mime-types/2.1.35:
- resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
- engines: {node: '>= 0.6'}
- dependencies:
- mime-db: 1.52.0
- dev: true
-
/mimic-fn/2.1.0:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'}
@@ -2507,18 +2473,6 @@ packages:
resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==}
dev: true
- /node-fetch/2.6.7:
- resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==}
- engines: {node: 4.x || >=6.0.0}
- peerDependencies:
- encoding: ^0.1.0
- peerDependenciesMeta:
- encoding:
- optional: true
- dependencies:
- whatwg-url: 5.0.0
- dev: false
-
/node-releases/2.0.6:
resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==}
dev: true
@@ -2699,6 +2653,11 @@ packages:
engines: {node: '>=12'}
dev: true
+ /p-timeout/6.0.0:
+ resolution: {integrity: sha512-5iS61MOdUMemWH9CORQRxVXTp9g5K8rPnI9uQpo97aWgsH3vVXKjkIhDi+OgIDmN3Ly9+AZ2fZV01Wut1yzfKA==}
+ engines: {node: '>=14.16'}
+ dev: false
+
/parse-json/4.0.0:
resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==}
engines: {node: '>=4'}
@@ -3210,6 +3169,12 @@ packages:
escape-string-regexp: 2.0.0
dev: true
+ /streamsearch/1.1.0:
+ resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==}
+ engines: {node: '>=10.0.0'}
+ dev: false
+ optional: true
+
/string-argv/0.3.1:
resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==}
engines: {node: '>=0.6.19'}
@@ -3382,10 +3347,6 @@ packages:
is-number: 7.0.0
dev: true
- /tr46/0.0.3:
- resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
- dev: false
-
/tr46/1.0.1:
resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==}
dependencies:
@@ -3522,6 +3483,15 @@ packages:
which-boxed-primitive: 1.0.2
dev: true
+ /undici/5.13.0:
+ resolution: {integrity: sha512-UDZKtwb2k7KRsK4SdXWG7ErXiL7yTGgLWvk2AXO1JMjgjh404nFo6tWSCM2xMpJwMPx3J8i/vfqEh1zOqvj82Q==}
+ engines: {node: '>=12.18'}
+ requiresBuild: true
+ dependencies:
+ busboy: 1.6.0
+ dev: false
+ optional: true
+
/unified/10.1.2:
resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==}
dependencies:
@@ -3627,10 +3597,6 @@ packages:
defaults: 1.0.4
dev: true
- /webidl-conversions/3.0.1:
- resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
- dev: false
-
/webidl-conversions/4.0.2:
resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==}
dev: true
@@ -3640,13 +3606,6 @@ packages:
engines: {node: '>=6'}
dev: true
- /whatwg-url/5.0.0:
- resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
- dependencies:
- tr46: 0.0.3
- webidl-conversions: 3.0.1
- dev: false
-
/whatwg-url/7.1.0:
resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
dependencies:
diff --git a/readme.md b/readme.md
index ed4cd90..3f8d795 100644
--- a/readme.md
+++ b/readme.md
@@ -11,10 +11,12 @@
- [Intro](#intro)
- [Install](#install)
- [Usage](#usage)
-- [Docs](#docs)
-- [How it works](#how-it-works)
-- [Examples](#examples)
-- [Credit](#credit)
+ - [Docs](#docs)
+ - [Demos](#demos)
+ - [Session Tokens](#session-tokens)
+- [Projects](#projects)
+- [Compatibility](#compatibility)
+- [Credits](#credits)
- [License](#license)
## Intro
@@ -36,7 +38,9 @@ import { ChatGPTAPI } from 'chatgpt'
async function example() {
// sessionToken is required; see below for details
- const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN })
+ const api = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
// ensure the API is properly authenticated
await api.ensureAuth()
@@ -51,7 +55,7 @@ async function example() {
}
```
-By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use:
+The default ChatGPT responses are formatted as markdown. If you want to work with plaintext only, you can use:
```ts
const api = new ChatGPTAPI({
@@ -60,6 +64,36 @@ const api = new ChatGPTAPI({
})
```
+If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`:
+
+```ts
+const api = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+})
+
+const conversation = api.getConversation()
+
+// send a message and wait for the response
+const response0 = await conversation.sendMessage('What is OpenAI?')
+
+// send a follow-up prompt to the previous message and wait for the response
+const response1 = await conversation.sendMessage('Can you expand on that?')
+
+// send another follow-up to the same conversation
+const response2 = await conversation.sendMessage('Oh cool; thank you')
+```
+
+Sometimes, ChatGPT will hang for an extended period of time before sending its response. This may be due to rate limiting or it may be due to OpenAI's servers being overloaded.
+
+To mitigate these issues, you can add a timeout like this:
+
+```ts
+// timeout after 2 minutes (which will also abort the underlying HTTP request)
+const response = await api.sendMessage('this is a timeout test', {
+ timeoutMs: 2 * 60 * 1000
+})
+```
+
Usage in CommonJS (Dynamic import)
@@ -80,7 +114,13 @@ async function example() {
-A full [demo](./src/demo.ts) is included for testing purposes:
+### Docs
+
+See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
+
+### Demos
+
+A [basic demo](./src/demo.ts) is included for testing purposes:
```bash
# 1. clone repo
@@ -90,11 +130,17 @@ A full [demo](./src/demo.ts) is included for testing purposes:
npx tsx src/demo.ts
```
-## Docs
+A [conversation demo](./src/demo-conversation.ts) is also included:
-See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
+```bash
+# 1. clone repo
+# 2. install node deps
+# 3. set `SESSION_TOKEN` in .env
+# 4. run:
+npx tsx src/demo-conversation.ts
+```
-## How it works
+### Session Tokens
**This package requires a valid session token from ChatGPT to access its unofficial REST API.**
@@ -114,7 +160,7 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l
> **Note**
> Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2).
-## Examples
+## Projects
All of these awesome projects are built using the `chatgpt` package. 🤯
@@ -135,7 +181,19 @@ All of these awesome projects are built using the `chatgpt` package. 🤯
If you create a cool integration, feel free to open a PR and add it to the list.
-## Credit
+## Compatibility
+
+This package is ESM-only. It supports:
+
+- Node.js >= 16.8
+ - If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0)
+- Edge runtimes like CF workers and Vercel edge functions
+- Modern browsers
+ - Mainly chrome extensions where your code is protected to a degree
+ - **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token
+ - If you want to build a website using `chatgpt`, we recommend using it only from your backend API
+
+## Credits
- Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪
- The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross)
@@ -145,4 +203,4 @@ If you create a cool integration, feel free to open a PR and add it to the list.
MIT © [Travis Fischer](https://transitivebullsh.it)
-If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter
+If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter
diff --git a/src/chatgpt-api.test.ts b/src/chatgpt-api.test.ts
index ea0bd18..fd4bdd6 100644
--- a/src/chatgpt-api.test.ts
+++ b/src/chatgpt-api.test.ts
@@ -36,12 +36,14 @@ test('ChatGPTAPI valid session token', async (t) => {
await t.notThrowsAsync(
(async () => {
- const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN })
+ const chatgpt = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
// Don't make any real API calls using our session token if we're running on CI
if (!isCI) {
- await api.ensureAuth()
- const response = await api.sendMessage('test')
+ await chatgpt.ensureAuth()
+ const response = await chatgpt.sendMessage('test')
console.log('chatgpt response', response)
t.truthy(response)
@@ -63,7 +65,50 @@ if (!isCI) {
},
{
message:
- 'ChatGPT failed to refresh auth token. Error: session token has expired'
+ 'ChatGPT failed to refresh auth token. Error: session token may have expired'
+ }
+ )
+ })
+}
+
+if (!isCI) {
+ test('ChatGPTAPI timeout', async (t) => {
+ t.timeout(30 * 1000) // 30 seconds
+
+ await t.throwsAsync(
+ async () => {
+ const chatgpt = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
+
+ await chatgpt.sendMessage('test', {
+ timeoutMs: 1
+ })
+ },
+ {
+ message: 'ChatGPT timed out waiting for response'
+ }
+ )
+ })
+
+ test('ChatGPTAPI abort', async (t) => {
+ t.timeout(30 * 1000) // 30 seconds
+
+ await t.throwsAsync(
+ async () => {
+ const chatgpt = new ChatGPTAPI({
+ sessionToken: process.env.SESSION_TOKEN
+ })
+
+ const abortController = new AbortController()
+ setTimeout(() => abortController.abort(new Error('testing abort')), 10)
+
+ await chatgpt.sendMessage('test', {
+ abortSignal: abortController.signal
+ })
+ },
+ {
+ message: 'testing abort'
}
)
})
diff --git a/src/chatgpt-api.ts b/src/chatgpt-api.ts
index 32a39a1..e02c811 100644
--- a/src/chatgpt-api.ts
+++ b/src/chatgpt-api.ts
@@ -1,7 +1,9 @@
import ExpiryMap from 'expiry-map'
+import pTimeout, { TimeoutError } from 'p-timeout'
import { v4 as uuidv4 } from 'uuid'
import * as types from './types'
+import { ChatGPTConversation } from './chatgpt-conversation'
import { fetch } from './fetch'
import { fetchSSE } from './fetch-sse'
import { markdownToText } from './utils'
@@ -10,79 +12,6 @@ const KEY_ACCESS_TOKEN = 'accessToken'
const USER_AGENT =
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'
-/**
- * A conversation wrapper around the ChatGPT API. This allows you to send
- * multiple messages to ChatGPT and receive responses, without having to
- * manually pass the conversation ID and parent message ID for each message.
- */
-class Conversation {
- api: ChatGPTAPI
- conversationId: string = undefined
- parentMessageId: string = undefined
-
- /**
- * Creates a new conversation wrapper around the ChatGPT API.
- * @param api - The ChatGPT API instance to use.
- */
- constructor(
- api: ChatGPTAPI,
- opts: { conversationId?: string; parentMessageId?: string } = {}
- ) {
- this.api = api
- this.conversationId = opts.conversationId
- this.parentMessageId = opts.parentMessageId
- }
-
- /**
- * Sends a message to ChatGPT, waits for the response to resolve, and returns
- * the response.
- * If this is the first message in the conversation, the conversation ID and
- * parent message ID will be automatically set.
- * This allows you to send multiple messages to ChatGPT and receive responses,
- * without having to manually pass the conversation ID and parent message ID
- * for each message.
- * If you want to manually pass the conversation ID and parent message ID,
- * use `api.sendMessage` instead.
- *
- * @param message - The plaintext message to send.
- * @param opts.onProgress - Optional listener which will be called every time the partial response is updated
- * @param opts.onConversationResponse - Optional listener which will be called every time a conversation response is received
- * @returns The plaintext response from ChatGPT.
- */
- async sendMessage(
- message: string,
- opts: {
- onProgress?: (partialResponse: string) => void
- onConversationResponse?: (
- response: types.ConversationResponseEvent
- ) => void
- } = {}
- ) {
- const { onProgress, onConversationResponse } = opts
- if (!this.conversationId) {
- return this.api.sendMessage(message, {
- onProgress,
- onConversationResponse: (response) => {
- this.conversationId = response.conversation_id
- this.parentMessageId = response.message.id
- onConversationResponse?.(response)
- }
- })
- }
-
- return this.api.sendMessage(message, {
- conversationId: this.conversationId,
- parentMessageId: this.parentMessageId,
- onProgress,
- onConversationResponse: (response) => {
- this.conversationId = response.conversation_id
- this.parentMessageId = response.message.id
- onConversationResponse?.(response)
- }
- })
- }
-}
-
export class ChatGPTAPI {
protected _sessionToken: string
protected _markdown: boolean
@@ -90,8 +19,9 @@ export class ChatGPTAPI {
protected _backendApiBaseUrl: string
protected _userAgent: string
- // stores access tokens for up to 10 seconds before needing to refresh
- protected _accessTokenCache = new ExpiryMap(10 * 1000)
+ // Stores access tokens for `accessTokenTTL` milliseconds before needing to refresh
+ // (defaults to 60 seconds)
+ protected _accessTokenCache: ExpiryMap
/**
* Creates a new client wrapper around the unofficial ChatGPT REST API.
@@ -100,6 +30,7 @@ export class ChatGPTAPI {
* @param apiBaseUrl - Optional override; the base URL for ChatGPT webapp's API (`/api`)
* @param backendApiBaseUrl - Optional override; the base URL for the ChatGPT backend API (`/backend-api`)
* @param userAgent - Optional override; the `user-agent` header to use with ChatGPT requests
+ * @param accessTokenTTL - Optional override; how long in milliseconds access tokens should last before being forcefully refreshed
*/
constructor(opts: {
sessionToken: string
@@ -115,13 +46,17 @@ export class ChatGPTAPI {
/** @defaultValue `'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'` **/
userAgent?: string
+
+ /** @defaultValue 60000 (60 seconds) */
+ accessTokenTTL?: number
}) {
const {
sessionToken,
markdown = true,
apiBaseUrl = 'https://chat.openai.com/api',
backendApiBaseUrl = 'https://chat.openai.com/backend-api',
- userAgent = USER_AGENT
+ userAgent = USER_AGENT,
+ accessTokenTTL = 60000 // 60 seconds
} = opts
this._sessionToken = sessionToken
@@ -130,51 +65,52 @@ export class ChatGPTAPI {
this._backendApiBaseUrl = backendApiBaseUrl
this._userAgent = userAgent
+ this._accessTokenCache = new ExpiryMap(accessTokenTTL)
+
if (!this._sessionToken) {
throw new Error('ChatGPT invalid session token')
}
}
- async getIsAuthenticated() {
- try {
- void (await this.refreshAccessToken())
- return true
- } catch (err) {
- return false
- }
- }
-
- async ensureAuth() {
- return await this.refreshAccessToken()
- }
-
/**
* Sends a message to ChatGPT, waits for the response to resolve, and returns
* the response.
*
- * @param message - The plaintext message to send.
- * @param opts.conversationId - Optional ID of the previous message in a conversation
- * @param opts.onProgress - Optional listener which will be called every time the partial response is updated
- * @param opts.onConversationResponse - Optional listener which will be called every time the partial response is updated with the full conversation response
+ * If you want to receive a stream of partial responses, use `opts.onProgress`.
+ * If you want to receive the full response, including message and conversation IDs,
+ * you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation`
+ * helper.
+ *
+ * @param message - The prompt message to send
+ * @param opts.conversationId - Optional ID of a conversation to continue
+ * @param opts.parentMessageId - Optional ID of the previous message in the conversation
+ * @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
+ * @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
+ * @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response
+ * @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
+ *
+ * @returns The response from ChatGPT
*/
async sendMessage(
message: string,
- opts: {
- conversationId?: string
- parentMessageId?: string
- onProgress?: (partialResponse: string) => void
- onConversationResponse?: (
- response: types.ConversationResponseEvent
- ) => void
- } = {}
+ opts: types.SendMessageOptions = {}
): Promise {
const {
conversationId,
parentMessageId = uuidv4(),
+ timeoutMs,
onProgress,
onConversationResponse
} = opts
+ let { abortSignal } = opts
+
+ let abortController: AbortController = null
+ if (timeoutMs && !abortSignal) {
+ abortController = new AbortController()
+ abortSignal = abortController.signal
+ }
+
const accessToken = await this.refreshAccessToken()
const body: types.ConversationJSONBody = {
@@ -198,14 +134,9 @@ export class ChatGPTAPI {
}
const url = `${this._backendApiBaseUrl}/conversation`
-
- // TODO: What's the best way to differentiate btwn wanting just the response text
- // versus wanting the full response message, so you can extract the ID and other
- // metadata?
- // let fullResponse: types.Message = null
let response = ''
- return new Promise((resolve, reject) => {
+ const responseP = new Promise((resolve, reject) => {
fetchSSE(url, {
method: 'POST',
headers: {
@@ -214,6 +145,7 @@ export class ChatGPTAPI {
'user-agent': this._userAgent
},
body: JSON.stringify(body),
+ signal: abortSignal,
onMessage: (data: string) => {
if (data === '[DONE]') {
return resolve(response)
@@ -224,6 +156,7 @@ export class ChatGPTAPI {
if (onConversationResponse) {
onConversationResponse(parsedData)
}
+
const message = parsedData.message
// console.log('event', JSON.stringify(parsedData, null, 2))
@@ -236,7 +169,6 @@ export class ChatGPTAPI {
}
response = text
- // fullResponse = message
if (onProgress) {
onProgress(text)
@@ -250,8 +182,56 @@ export class ChatGPTAPI {
}
}).catch(reject)
})
+
+ if (timeoutMs) {
+ if (abortController) {
+ // This will be called when a timeout occurs in order for us to forcibly
+ // ensure that the underlying HTTP request is aborted.
+ ;(responseP as any).cancel = () => {
+ abortController.abort()
+ }
+ }
+
+ return pTimeout(responseP, {
+ milliseconds: timeoutMs,
+ message: 'ChatGPT timed out waiting for response'
+ })
+ } else {
+ return responseP
+ }
}
+ /**
+ * @returns `true` if the client has a valid access token or `false` if refreshing
+ * the token fails.
+ */
+ async getIsAuthenticated() {
+ try {
+ void (await this.refreshAccessToken())
+ return true
+ } catch (err) {
+ return false
+ }
+ }
+
+ /**
+ * Refreshes the client's access token which will succeed only if the session
+ * is still valid.
+ */
+ async ensureAuth() {
+ return await this.refreshAccessToken()
+ }
+
+ /**
+ * Attempts to refresh the current access token using the ChatGPT
+ * `sessionToken` cookie.
+ *
+ * Access tokens will be cached for up to `accessTokenTTL` milliseconds to
+ * prevent refreshing access tokens too frequently.
+ *
+ * @returns A valid access token
+ * @throws An error if refreshing the access token fails.
+ */
async refreshAccessToken(): Promise {
const cachedAccessToken = this._accessTokenCache.get(KEY_ACCESS_TOKEN)
if (cachedAccessToken) {
@@ -264,7 +244,13 @@ export class ChatGPTAPI {
cookie: `__Secure-next-auth.session-token=${this._sessionToken}`,
'user-agent': this._userAgent
}
- }).then((r) => r.json() as any as types.SessionResult)
+ }).then((r) => {
+ if (!r.ok) {
+ throw new Error(`${r.status} ${r.statusText}`)
+ }
+
+ return r.json() as any as types.SessionResult
+ })
const accessToken = res?.accessToken
@@ -275,7 +261,7 @@ export class ChatGPTAPI {
const error = res?.error
if (error) {
if (error === 'RefreshAccessTokenError') {
- throw new Error('session token has expired')
+ throw new Error('session token may have expired')
} else {
throw new Error(error)
}
@@ -289,15 +275,16 @@ export class ChatGPTAPI {
}
/**
- * Get a new Conversation instance, which can be used to send multiple messages as part of a single conversation.
+ * Gets a new ChatGPTConversation instance, which can be used to send multiple
+ * messages as part of a single conversation.
*
- * @param opts.conversationId - Optional Data of the previous message in a conversation
- * @param opts.parentMessageId - Optional Data of the previous message in a conversation
- * @returns a new Conversation instance
+ * @param opts.conversationId - Optional ID of the previous message in a conversation
+ * @param opts.parentMessageId - Optional ID of the previous message in a conversation
+ * @returns The new conversation instance
*/
getConversation(
opts: { conversationId?: string; parentMessageId?: string } = {}
) {
- return new Conversation(this, opts)
+ return new ChatGPTConversation(this, opts)
}
}
diff --git a/src/chatgpt-conversation.ts b/src/chatgpt-conversation.ts
new file mode 100644
index 0000000..972917a
--- /dev/null
+++ b/src/chatgpt-conversation.ts
@@ -0,0 +1,73 @@
+import * as types from './types'
+import { type ChatGPTAPI } from './chatgpt-api'
+
+/**
+ * A conversation wrapper around the ChatGPTAPI. This allows you to send
+ * multiple messages to ChatGPT and receive responses, without having to
+ * manually pass the conversation ID and parent message ID for each message.
+ */
+export class ChatGPTConversation {
+ api: ChatGPTAPI
+ conversationId: string = undefined
+ parentMessageId: string = undefined
+
+ /**
+ * Creates a new conversation wrapper around the ChatGPT API.
+ *
+ * @param api - The ChatGPT API instance to use
+ * @param opts.conversationId - Optional ID of a conversation to continue
+ * @param opts.parentMessageId - Optional ID of the previous message in the conversation
+ */
+ constructor(
+ api: ChatGPTAPI,
+ opts: { conversationId?: string; parentMessageId?: string } = {}
+ ) {
+ this.api = api
+ this.conversationId = opts.conversationId
+ this.parentMessageId = opts.parentMessageId
+ }
+
+ /**
+ * Sends a message to ChatGPT, waits for the response to resolve, and returns
+ * the response.
+ *
+ * If this is the first message in the conversation, the conversation ID and
+ * parent message ID will be automatically set.
+ *
+ * This allows you to send multiple messages to ChatGPT and receive responses,
+ * without having to manually pass the conversation ID and parent message ID
+ * for each message.
+ *
+ * @param message - The prompt message to send
+ * @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
+ * @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response
+ * @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
+ *
+ * @returns The response from ChatGPT
+ */
+ async sendMessage(
+ message: string,
+ opts: types.SendConversationMessageOptions = {}
+ ): Promise {
+ const { onConversationResponse, ...rest } = opts
+
+ return this.api.sendMessage(message, {
+ ...rest,
+ conversationId: this.conversationId,
+ parentMessageId: this.parentMessageId,
+ onConversationResponse: (response) => {
+ if (response.conversation_id) {
+ this.conversationId = response.conversation_id
+ }
+
+ if (response.message?.id) {
+ this.parentMessageId = response.message.id
+ }
+
+ if (onConversationResponse) {
+ return onConversationResponse(response)
+ }
+ }
+ })
+ }
+}
diff --git a/src/demo-conversation.ts b/src/demo-conversation.ts
index c0199b8..915f39b 100644
--- a/src/demo-conversation.ts
+++ b/src/demo-conversation.ts
@@ -6,10 +6,10 @@ import { ChatGPTAPI } from '.'
dotenv.config()
/**
- * Example CLI for testing functionality.
+ * Demo CLI for testing conversation support.
*
* ```
- * npx tsx src/demo.ts
+ * npx tsx src/demo-conversation.ts
* ```
*/
async function main() {
diff --git a/src/demo.ts b/src/demo.ts
index 7bae4db..dcfd028 100644
--- a/src/demo.ts
+++ b/src/demo.ts
@@ -6,7 +6,7 @@ import { ChatGPTAPI } from '.'
dotenv.config()
/**
- * Example CLI for testing functionality.
+ * Demo CLI for testing basic functionality.
*
* ```
* npx tsx src/demo.ts
diff --git a/src/fetch-sse.ts b/src/fetch-sse.ts
index 3a0b585..705480c 100644
--- a/src/fetch-sse.ts
+++ b/src/fetch-sse.ts
@@ -1,31 +1,26 @@
import { createParser } from 'eventsource-parser'
import { fetch } from './fetch'
-
-// import { streamAsyncIterable } from './stream-async-iterable'
+import { streamAsyncIterable } from './stream-async-iterable'
export async function fetchSSE(
url: string,
options: Parameters[1] & { onMessage: (data: string) => void }
) {
const { onMessage, ...fetchOptions } = options
- const resp = await fetch(url, fetchOptions)
+ const res = await fetch(url, fetchOptions)
+ if (!res.ok) {
+ throw new Error(`ChatGPTAPI error ${res.status || res.statusText}`)
+ }
+
const parser = createParser((event) => {
if (event.type === 'event') {
onMessage(event.data)
}
})
- resp.body.on('readable', () => {
- let chunk: string | Buffer
- while (null !== (chunk = resp.body.read())) {
- parser.feed(chunk.toString())
- }
- })
-
- // TODO: add support for web-compatible `fetch`
- // for await (const chunk of streamAsyncIterable(resp.body)) {
- // const str = new TextDecoder().decode(chunk)
- // parser.feed(str)
- // }
+ for await (const chunk of streamAsyncIterable(res.body)) {
+ const str = new TextDecoder().decode(chunk)
+ parser.feed(str)
+ }
}
diff --git a/src/fetch.ts b/src/fetch.ts
index fe65965..8448010 100644
--- a/src/fetch.ts
+++ b/src/fetch.ts
@@ -1,3 +1,12 @@
-import fetch from 'node-fetch'
+///
+
+// Use `undici` for node.js 16 and 17
+// Use `fetch` for node.js >= 18
+// Use `fetch` for all other environments, including browsers
+// NOTE: The top-level await is removed in a `postbuild` npm script for the
+// browser build
+const fetch =
+ globalThis.fetch ??
+ ((await import('undici')).fetch as unknown as typeof globalThis.fetch)
export { fetch }
diff --git a/src/index.ts b/src/index.ts
index 451712e..ed6a4b5 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,3 +1,4 @@
export * from './chatgpt-api'
+export * from './chatgpt-conversation'
export * from './types'
export * from './utils'
diff --git a/src/stream-async-iterable.ts b/src/stream-async-iterable.ts
index fbfe174..78eb497 100644
--- a/src/stream-async-iterable.ts
+++ b/src/stream-async-iterable.ts
@@ -1,6 +1,4 @@
-import { type ReadableStream } from 'stream/web'
-
-export async function* streamAsyncIterable(stream: ReadableStream) {
+export async function* streamAsyncIterable(stream: ReadableStream) {
const reader = stream.getReader()
try {
while (true) {
diff --git a/src/types.ts b/src/types.ts
index b26da3b..ae73f6c 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -273,3 +273,17 @@ export type MessageContent = {
}
export type MessageMetadata = any
+
+export type SendMessageOptions = {
+ conversationId?: string
+ parentMessageId?: string
+ timeoutMs?: number
+ onProgress?: (partialResponse: string) => void
+ onConversationResponse?: (response: ConversationResponseEvent) => void
+ abortSignal?: AbortSignal
+}
+
+export type SendConversationMessageOptions = Omit<
+ SendMessageOptions,
+ 'conversationId' | 'parentMessageId'
+>
diff --git a/tsup.config.ts b/tsup.config.ts
index 5dedfd7..321fadf 100644
--- a/tsup.config.ts
+++ b/tsup.config.ts
@@ -1,14 +1,30 @@
import { defineConfig } from 'tsup'
-export default defineConfig({
- entry: ['src/index.ts'],
- outDir: 'build',
- target: 'node14',
- platform: 'node',
- format: ['esm'],
- splitting: false,
- sourcemap: true,
- minify: true,
- shims: false,
- dts: true
-})
+export default defineConfig([
+ {
+ entry: ['src/index.ts'],
+ outDir: 'build',
+ target: 'node16',
+ platform: 'node',
+ format: ['esm'],
+ splitting: false,
+ sourcemap: true,
+ minify: false,
+ shims: true,
+ dts: true,
+ external: ['undici']
+ },
+ {
+ entry: ['src/index.ts'],
+ outDir: 'build/browser',
+ target: 'chrome89',
+ platform: 'browser',
+ format: ['esm'],
+ splitting: false,
+ sourcemap: true,
+ minify: false,
+ shims: true,
+ dts: true,
+ external: ['undici']
+ }
+])