Merge pull request #38 from transitive-bullshit/feature/native-fetch-and-edge-support

pull/44/head
Travis Fischer 2022-12-06 22:40:01 -06:00 zatwierdzone przez GitHub
commit 20c376e51f
Nie znaleziono w bazie danych klucza dla tego podpisu
ID klucza GPG: 4AEE18F83AFDEB23
19 zmienionych plików z 722 dodań i 289 usunięć

Wyświetl plik

@ -12,7 +12,6 @@ jobs:
node-version: node-version:
- 18 - 18
- 16 - 16
- 14
steps: steps:
- name: Checkout - name: Checkout

Wyświetl plik

@ -11,6 +11,7 @@
### Methods ### Methods
- [ensureAuth](ChatGPTAPI.md#ensureauth) - [ensureAuth](ChatGPTAPI.md#ensureauth)
- [getConversation](ChatGPTAPI.md#getconversation)
- [getIsAuthenticated](ChatGPTAPI.md#getisauthenticated) - [getIsAuthenticated](ChatGPTAPI.md#getisauthenticated)
- [refreshAccessToken](ChatGPTAPI.md#refreshaccesstoken) - [refreshAccessToken](ChatGPTAPI.md#refreshaccesstoken)
- [sendMessage](ChatGPTAPI.md#sendmessage) - [sendMessage](ChatGPTAPI.md#sendmessage)
@ -28,6 +29,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
| Name | Type | Description | | Name | Type | Description |
| :------ | :------ | :------ | | :------ | :------ | :------ |
| `opts` | `Object` | - | | `opts` | `Object` | - |
| `opts.accessTokenTTL?` | `number` | **`Default Value`** 60000 (60 seconds) |
| `opts.apiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/api'` * | | `opts.apiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/api'` * |
| `opts.backendApiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/backend-api'` * | | `opts.backendApiBaseUrl?` | `string` | **`Default Value`** `'https://chat.openai.com/backend-api'` * |
| `opts.markdown?` | `boolean` | **`Default Value`** `true` * | | `opts.markdown?` | `boolean` | **`Default Value`** `true` * |
@ -36,7 +38,7 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
#### Defined in #### Defined in
[chatgpt-api.ts:31](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L31) [chatgpt-api.ts:35](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L35)
## Methods ## Methods
@ -44,13 +46,43 @@ Creates a new client wrapper around the unofficial ChatGPT REST API.
**ensureAuth**(): `Promise`<`string`\> **ensureAuth**(): `Promise`<`string`\>
Refreshes the client's access token which will succeed only if the session
is still valid.
#### Returns #### Returns
`Promise`<`string`\> `Promise`<`string`\>
#### Defined in #### Defined in
[chatgpt-api.ts:74](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L74) [chatgpt-api.ts:221](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L221)
___
### getConversation
**getConversation**(`opts?`): [`ChatGPTConversation`](ChatGPTConversation.md)
Gets a new ChatGPTConversation instance, which can be used to send multiple
messages as part of a single conversation.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `opts` | `Object` | - |
| `opts.conversationId?` | `string` | Optional ID of a conversation to continue |
| `opts.parentMessageId?` | `string` | Optional ID of the previous message in a conversation |
#### Returns
[`ChatGPTConversation`](ChatGPTConversation.md)
The new conversation instance
#### Defined in
[chatgpt-api.ts:285](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L285)
___ ___
@ -62,9 +94,12 @@ ___
`Promise`<`boolean`\> `Promise`<`boolean`\>
`true` if the client has a valid access token or `false` if refreshing
the token fails.
#### Defined in #### Defined in
[chatgpt-api.ts:65](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L65) [chatgpt-api.ts:208](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L208)
___ ___
@ -72,13 +107,25 @@ ___
**refreshAccessToken**(): `Promise`<`string`\> **refreshAccessToken**(): `Promise`<`string`\>
Attempts to refresh the current access token using the ChatGPT
`sessionToken` cookie.
Access tokens will be cached for up to `accessTokenTTL` milliseconds to
prevent refreshing access tokens too frequently.
**`Throws`**
An error if refreshing the access token fails.
#### Returns #### Returns
`Promise`<`string`\> `Promise`<`string`\>
A valid access token
#### Defined in #### Defined in
[chatgpt-api.ts:165](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L165) [chatgpt-api.ts:235](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L235)
___ ___
@ -89,19 +136,24 @@ ___
Sends a message to ChatGPT, waits for the response to resolve, and returns Sends a message to ChatGPT, waits for the response to resolve, and returns
the response. the response.
If you want to receive a stream of partial responses, use `opts.onProgress`.
If you want to receive the full response, including message and conversation IDs,
you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation`
helper.
#### Parameters #### Parameters
| Name | Type | Description | | Name | Type | Description |
| :------ | :------ | :------ | | :------ | :------ | :------ |
| `message` | `string` | The plaintext message to send. | | `message` | `string` | The prompt message to send |
| `opts` | `Object` | - | | `opts` | [`SendMessageOptions`](../modules.md#sendmessageoptions) | - |
| `opts.conversationId?` | `string` | Optional ID of the previous message in a conversation |
| `opts.onProgress?` | (`partialResponse`: `string`) => `void` | - |
#### Returns #### Returns
`Promise`<`string`\> `Promise`<`string`\>
The response from ChatGPT
#### Defined in #### Defined in
[chatgpt-api.ts:86](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/chatgpt-api.ts#L86) [chatgpt-api.ts:94](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-api.ts#L94)

Wyświetl plik

@ -0,0 +1,107 @@
[chatgpt](../readme.md) / [Exports](../modules.md) / ChatGPTConversation
# Class: ChatGPTConversation
A conversation wrapper around the ChatGPTAPI. This allows you to send
multiple messages to ChatGPT and receive responses, without having to
manually pass the conversation ID and parent message ID for each message.
## Table of contents
### Constructors
- [constructor](ChatGPTConversation.md#constructor)
### Properties
- [api](ChatGPTConversation.md#api)
- [conversationId](ChatGPTConversation.md#conversationid)
- [parentMessageId](ChatGPTConversation.md#parentmessageid)
### Methods
- [sendMessage](ChatGPTConversation.md#sendmessage)
## Constructors
### constructor
**new ChatGPTConversation**(`api`, `opts?`)
Creates a new conversation wrapper around the ChatGPT API.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `api` | [`ChatGPTAPI`](ChatGPTAPI.md) | The ChatGPT API instance to use |
| `opts` | `Object` | - |
| `opts.conversationId?` | `string` | Optional ID of a conversation to continue |
| `opts.parentMessageId?` | `string` | Optional ID of the previous message in the conversation |
#### Defined in
[chatgpt-conversation.ts:21](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L21)
## Properties
### api
**api**: [`ChatGPTAPI`](ChatGPTAPI.md)
#### Defined in
[chatgpt-conversation.ts:10](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L10)
___
### conversationId
**conversationId**: `string` = `undefined`
#### Defined in
[chatgpt-conversation.ts:11](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L11)
___
### parentMessageId
**parentMessageId**: `string` = `undefined`
#### Defined in
[chatgpt-conversation.ts:12](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L12)
## Methods
### sendMessage
**sendMessage**(`message`, `opts?`): `Promise`<`string`\>
Sends a message to ChatGPT, waits for the response to resolve, and returns
the response.
If this is the first message in the conversation, the conversation ID and
parent message ID will be automatically set.
This allows you to send multiple messages to ChatGPT and receive responses,
without having to manually pass the conversation ID and parent message ID
for each message.
#### Parameters
| Name | Type | Description |
| :------ | :------ | :------ |
| `message` | `string` | The prompt message to send |
| `opts` | [`SendConversationMessageOptions`](../modules.md#sendconversationmessageoptions) | - |
#### Returns
`Promise`<`string`\>
The response from ChatGPT
#### Defined in
[chatgpt-conversation.ts:48](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/chatgpt-conversation.ts#L48)

Wyświetl plik

@ -7,6 +7,7 @@
### Classes ### Classes
- [ChatGPTAPI](classes/ChatGPTAPI.md) - [ChatGPTAPI](classes/ChatGPTAPI.md)
- [ChatGPTConversation](classes/ChatGPTConversation.md)
### Type Aliases ### Type Aliases
@ -28,6 +29,8 @@
- [Prompt](modules.md#prompt) - [Prompt](modules.md#prompt)
- [PromptContent](modules.md#promptcontent) - [PromptContent](modules.md#promptcontent)
- [Role](modules.md#role) - [Role](modules.md#role)
- [SendConversationMessageOptions](modules.md#sendconversationmessageoptions)
- [SendMessageOptions](modules.md#sendmessageoptions)
- [SessionResult](modules.md#sessionresult) - [SessionResult](modules.md#sessionresult)
- [User](modules.md#user) - [User](modules.md#user)
@ -43,7 +46,7 @@
#### Defined in #### Defined in
[types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L109) [types.ts:109](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L109)
___ ___
@ -53,7 +56,7 @@ ___
#### Defined in #### Defined in
[types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L1) [types.ts:1](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L1)
___ ___
@ -75,7 +78,7 @@ https://chat.openapi.com/backend-api/conversation
#### Defined in #### Defined in
[types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L134) [types.ts:134](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L134)
___ ___
@ -93,7 +96,7 @@ ___
#### Defined in #### Defined in
[types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L251) [types.ts:251](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L251)
___ ___
@ -118,7 +121,7 @@ ___
#### Defined in #### Defined in
[types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L257) [types.ts:257](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L257)
___ ___
@ -135,7 +138,7 @@ ___
#### Defined in #### Defined in
[types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L270) [types.ts:270](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L270)
___ ___
@ -157,7 +160,7 @@ https://chat.openapi.com/backend-api/conversation/message_feedback
#### Defined in #### Defined in
[types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L193) [types.ts:193](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L193)
___ ___
@ -167,7 +170,7 @@ ___
#### Defined in #### Defined in
[types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L249) [types.ts:249](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L249)
___ ___
@ -187,7 +190,7 @@ ___
#### Defined in #### Defined in
[types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L222) [types.ts:222](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L222)
___ ___
@ -197,7 +200,7 @@ ___
#### Defined in #### Defined in
[types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L220) [types.ts:220](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L220)
___ ___
@ -207,7 +210,7 @@ ___
#### Defined in #### Defined in
[types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L275) [types.ts:275](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L275)
___ ___
@ -225,7 +228,7 @@ ___
#### Defined in #### Defined in
[types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L77) [types.ts:77](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L77)
___ ___
@ -243,7 +246,7 @@ https://chat.openapi.com/backend-api/models
#### Defined in #### Defined in
[types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L70) [types.ts:70](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L70)
___ ___
@ -262,7 +265,7 @@ https://chat.openapi.com/backend-api/moderations
#### Defined in #### Defined in
[types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L97) [types.ts:97](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L97)
___ ___
@ -282,7 +285,7 @@ https://chat.openapi.com/backend-api/moderations
#### Defined in #### Defined in
[types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L114) [types.ts:114](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L114)
___ ___
@ -300,7 +303,7 @@ ___
#### Defined in #### Defined in
[types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L161) [types.ts:161](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L161)
___ ___
@ -317,7 +320,7 @@ ___
#### Defined in #### Defined in
[types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L178) [types.ts:178](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L178)
___ ___
@ -327,7 +330,38 @@ ___
#### Defined in #### Defined in
[types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L3) [types.ts:3](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L3)
___
### SendConversationMessageOptions
Ƭ **SendConversationMessageOptions**: `Omit`<[`SendMessageOptions`](modules.md#sendmessageoptions), ``"conversationId"`` \| ``"parentMessageId"``\>
#### Defined in
[types.ts:286](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L286)
___
### SendMessageOptions
Ƭ **SendMessageOptions**: `Object`
#### Type declaration
| Name | Type |
| :------ | :------ |
| `abortSignal?` | `AbortSignal` |
| `conversationId?` | `string` |
| `onConversationResponse?` | (`response`: [`ConversationResponseEvent`](modules.md#conversationresponseevent)) => `void` |
| `onProgress?` | (`partialResponse`: `string`) => `void` |
| `parentMessageId?` | `string` |
| `timeoutMs?` | `number` |
#### Defined in
[types.ts:277](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L277)
___ ___
@ -348,7 +382,7 @@ https://chat.openapi.com/api/auth/session
#### Defined in #### Defined in
[types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L8) [types.ts:8](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L8)
___ ___
@ -370,7 +404,7 @@ ___
#### Defined in #### Defined in
[types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/types.ts#L30) [types.ts:30](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/types.ts#L30)
## Functions ## Functions
@ -390,4 +424,4 @@ ___
#### Defined in #### Defined in
[utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/80d77a3/src/utils.ts#L4) [utils.ts:4](https://github.com/transitive-bullshit/chatgpt-api/blob/042b2fe/src/utils.ts#L4)

Wyświetl plik

@ -14,9 +14,11 @@ chatgpt / [Exports](modules.md)
- [Install](#install) - [Install](#install)
- [Usage](#usage) - [Usage](#usage)
- [Docs](#docs) - [Docs](#docs)
- [How it works](#how-it-works) - [Demos](#demos)
- [Examples](#examples) - [Session Tokens](#session-tokens)
- [Credit](#credit) - [Projects](#projects)
- [Compatibility](#compatibility)
- [Credits](#credits)
- [License](#license) - [License](#license)
## Intro ## Intro
@ -38,7 +40,9 @@ import { ChatGPTAPI } from 'chatgpt'
async function example() { async function example() {
// sessionToken is required; see below for details // sessionToken is required; see below for details
const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) const api = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
// ensure the API is properly authenticated // ensure the API is properly authenticated
await api.ensureAuth() await api.ensureAuth()
@ -53,7 +57,7 @@ async function example() {
} }
``` ```
By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use: The default ChatGPT responses are formatted as markdown. If you want to work with plaintext only, you can use:
```ts ```ts
const api = new ChatGPTAPI({ const api = new ChatGPTAPI({
@ -62,7 +66,63 @@ const api = new ChatGPTAPI({
}) })
``` ```
A full [demo](./src/demo.ts) is included for testing purposes: If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`:
```ts
const api = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
const conversation = api.getConversation()
// send a message and wait for the response
const response0 = await conversation.sendMessage('What is OpenAI?')
// send a follow-up prompt to the previous message and wait for the response
const response1 = await conversation.sendMessage('Can you expand on that?')
// send another follow-up to the same conversation
const response2 = await conversation.sendMessage('Oh cool; thank you')
```
Sometimes, ChatGPT will hang for an extended period of time before sending its response. This may be due to rate limiting or it may be due to OpenAI's servers being overloaded.
To mitigate this issue, you can add a timeout like this:
```ts
// timeout after 2 minutes (which will also abort the underlying HTTP request)
const response = await api.sendMessage('this is a timeout test', {
timeoutMs: 2 * 60 * 1000
})
```
<details>
<summary>Usage in CommonJS (Dynamic import)</summary>
```js
async function example() {
// To use ESM in CommonJS, you can use a dynamic import
const { ChatGPTAPI } = await import('chatgpt')
const api = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
await api.ensureAuth()
const response = await api.sendMessage('Hello World!')
console.log(response)
}
```
</details>
### Docs
See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
### Demos
A [basic demo](./src/demo.ts) is included for testing purposes:
```bash ```bash
# 1. clone repo # 1. clone repo
@ -72,11 +132,17 @@ A full [demo](./src/demo.ts) is included for testing purposes:
npx tsx src/demo.ts npx tsx src/demo.ts
``` ```
## Docs A [conversation demo](./src/demo-conversation.ts) is also included:
See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. ```bash
# 1. clone repo
# 2. install node deps
# 3. set `SESSION_TOKEN` in .env
# 4. run:
npx tsx src/demo-conversation.ts
```
## How it works ### Session Tokens
**This package requires a valid session token from ChatGPT to access its unofficial REST API.** **This package requires a valid session token from ChatGPT to access its unofficial REST API.**
@ -96,24 +162,40 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l
> **Note** > **Note**
> Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2).
## Examples ## Projects
All of these awesome projects are built using the `chatgpt` package. 🤯 All of these awesome projects are built using the `chatgpt` package. 🤯
- [Twitter Bot](https://github.com/transitive-bullshit/chatgpt-twitter-bot) powered by ChatGPT ✨ - [Twitter Bot](https://github.com/transitive-bullshit/chatgpt-twitter-bot) powered by ChatGPT ✨
- Mention [@ChatGPTBot](https://twitter.com/ChatGPTBot) on Twitter with your prompt to try it out - Mention [@ChatGPTBot](https://twitter.com/ChatGPTBot) on Twitter with your prompt to try it out
- [Chrome Extension](https://github.com/gragland/chatgpt-everywhere) ([demo](https://twitter.com/gabe_ragland/status/1599466486422470656)) - [Chrome Extension](https://github.com/gragland/chatgpt-everywhere) ([demo](https://twitter.com/gabe_ragland/status/1599466486422470656))
- [VSCode Extension](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400)) - [VSCode Extension #1](https://github.com/mpociot/chatgpt-vscode) ([demo](https://twitter.com/marcelpociot/status/1599180144551526400))
- [VSCode Extension #2](https://github.com/barnesoir/chatgpt-vscode-plugin) ([marketplace](https://marketplace.visualstudio.com/items?itemName=JayBarnes.chatgpt-vscode-plugin))
- [VSCode Extension #3](https://github.com/gencay/vscode-chatgpt) ([marketplace](https://marketplace.visualstudio.com/items?itemName=gencay.vscode-chatgpt))
- [Raycast Extension](https://github.com/abielzulio/chatgpt-raycast) ([demo](https://twitter.com/abielzulio/status/1600176002042191875))
- [Go Telegram Bot](https://github.com/m1guelpf/chatgpt-telegram) - [Go Telegram Bot](https://github.com/m1guelpf/chatgpt-telegram)
- [GitHub ProBot](https://github.com/oceanlvr/ChatGPTBot) - [GitHub ProBot](https://github.com/oceanlvr/ChatGPTBot)
- [Discord Bot](https://github.com/onury5506/Discord-ChatGPT-Bot) - [Discord Bot](https://github.com/onury5506/Discord-ChatGPT-Bot)
- [WeChat Bot](https://github.com/AutumnWhj/ChatGPT-wechat-bot) - [WeChat Bot](https://github.com/AutumnWhj/ChatGPT-wechat-bot)
- [Lovelines.xyz](https://lovelines.xyz) - [Lovelines.xyz](https://lovelines.xyz)
- [EXM smart contracts](https://github.com/decentldotland/molecule) - [EXM smart contracts](https://github.com/decentldotland/molecule)
- [Flutter ChatGPT API](https://github.com/coskuncay/flutter_chatgpt_api)
If you create a cool integration, feel free to open a PR and add it to the list. If you create a cool integration, feel free to open a PR and add it to the list.
## Credit ## Compatibility
This package is ESM-only. It supports:
- Node.js >= 16.8
- If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0)
- Edge runtimes like CF workers and Vercel edge functions
- Modern browsers
- Mainly chrome extensions where your code is protected to a degree
- **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token
- If you want to build a website using `chatgpt`, we recommend using it only from your backend API
## Credits
- Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪 - Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪
- The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross) - The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross)
@ -123,4 +205,4 @@ If you create a cool integration, feel free to open a PR and add it to the list.
MIT © [Travis Fischer](https://transitivebullsh.it) MIT © [Travis Fischer](https://transitivebullsh.it)
If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or <a href="https://twitter.com/transitive_bs">following me on twitter <img src="https://storage.googleapis.com/saasify-assets/twitter-logo.svg" alt="twitter" height="24px" align="center"></a> If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or <a href="https://twitter.com/transitive_bs">following me on twitter <img src="https://storage.googleapis.com/saasify-assets/twitter-logo.svg" alt="twitter" height="24px" align="center"></a>

Wyświetl plik

@ -10,6 +10,7 @@
"types": "./build/index.d.ts", "types": "./build/index.d.ts",
"exports": { "exports": {
".": { ".": {
"browser": "./build/browser/index.js",
"import": "./build/index.js", "import": "./build/index.js",
"types": "./build/index.d.ts", "types": "./build/index.d.ts",
"default": "./build/index.js" "default": "./build/index.js"
@ -19,13 +20,14 @@
"build" "build"
], ],
"engines": { "engines": {
"node": ">=14" "node": ">=16.8"
}, },
"scripts": { "scripts": {
"build": "tsup", "build": "tsup",
"dev": "tsup --watch", "dev": "tsup --watch",
"clean": "del build", "clean": "del build",
"prebuild": "run-s clean", "prebuild": "run-s clean",
"postbuild": "[ -n CI ] && sed -i '' 's/ *\\?\\? *(await import(\"undici\")).fetch//' build/browser/index.js || echo 'skipping postbuild on CI'",
"predev": "run-s clean", "predev": "run-s clean",
"pretest": "run-s build", "pretest": "run-s build",
"docs": "typedoc", "docs": "typedoc",
@ -38,7 +40,7 @@
"dependencies": { "dependencies": {
"eventsource-parser": "^0.0.5", "eventsource-parser": "^0.0.5",
"expiry-map": "^2.0.0", "expiry-map": "^2.0.0",
"node-fetch": "2", "p-timeout": "^6.0.0",
"remark": "^14.0.2", "remark": "^14.0.2",
"strip-markdown": "^5.0.0", "strip-markdown": "^5.0.0",
"uuid": "^9.0.0" "uuid": "^9.0.0"
@ -46,7 +48,6 @@
"devDependencies": { "devDependencies": {
"@trivago/prettier-plugin-sort-imports": "^4.0.0", "@trivago/prettier-plugin-sort-imports": "^4.0.0",
"@types/node": "^18.11.9", "@types/node": "^18.11.9",
"@types/node-fetch": "2",
"@types/uuid": "^9.0.0", "@types/uuid": "^9.0.0",
"ava": "^5.1.0", "ava": "^5.1.0",
"del-cli": "^5.0.0", "del-cli": "^5.0.0",
@ -62,6 +63,9 @@
"typedoc-plugin-markdown": "^3.13.6", "typedoc-plugin-markdown": "^3.13.6",
"typescript": "^4.9.3" "typescript": "^4.9.3"
}, },
"optionalDependencies": {
"undici": "^5.13.0"
},
"lint-staged": { "lint-staged": {
"*.{ts,tsx}": [ "*.{ts,tsx}": [
"prettier --write" "prettier --write"

Wyświetl plik

@ -3,7 +3,6 @@ lockfileVersion: 5.4
specifiers: specifiers:
'@trivago/prettier-plugin-sort-imports': ^4.0.0 '@trivago/prettier-plugin-sort-imports': ^4.0.0
'@types/node': ^18.11.9 '@types/node': ^18.11.9
'@types/node-fetch': '2'
'@types/uuid': ^9.0.0 '@types/uuid': ^9.0.0
ava: ^5.1.0 ava: ^5.1.0
del-cli: ^5.0.0 del-cli: ^5.0.0
@ -12,9 +11,9 @@ specifiers:
expiry-map: ^2.0.0 expiry-map: ^2.0.0
husky: ^8.0.2 husky: ^8.0.2
lint-staged: ^13.0.3 lint-staged: ^13.0.3
node-fetch: '2'
npm-run-all: ^4.1.5 npm-run-all: ^4.1.5
ora: ^6.1.2 ora: ^6.1.2
p-timeout: ^6.0.0
prettier: ^2.8.0 prettier: ^2.8.0
remark: ^14.0.2 remark: ^14.0.2
strip-markdown: ^5.0.0 strip-markdown: ^5.0.0
@ -23,20 +22,23 @@ specifiers:
typedoc: ^0.23.21 typedoc: ^0.23.21
typedoc-plugin-markdown: ^3.13.6 typedoc-plugin-markdown: ^3.13.6
typescript: ^4.9.3 typescript: ^4.9.3
undici: ^5.13.0
uuid: ^9.0.0 uuid: ^9.0.0
dependencies: dependencies:
eventsource-parser: 0.0.5 eventsource-parser: 0.0.5
expiry-map: 2.0.0 expiry-map: 2.0.0
node-fetch: 2.6.7 p-timeout: 6.0.0
remark: 14.0.2 remark: 14.0.2
strip-markdown: 5.0.0 strip-markdown: 5.0.0
uuid: 9.0.0 uuid: 9.0.0
optionalDependencies:
undici: 5.13.0
devDependencies: devDependencies:
'@trivago/prettier-plugin-sort-imports': 4.0.0_prettier@2.8.0 '@trivago/prettier-plugin-sort-imports': 4.0.0_prettier@2.8.0
'@types/node': 18.11.10 '@types/node': 18.11.10
'@types/node-fetch': 2.6.2
'@types/uuid': 9.0.0 '@types/uuid': 9.0.0
ava: 5.1.0 ava: 5.1.0
del-cli: 5.0.0 del-cli: 5.0.0
@ -434,13 +436,6 @@ packages:
resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==}
dev: false dev: false
/@types/node-fetch/2.6.2:
resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==}
dependencies:
'@types/node': 18.11.10
form-data: 3.0.1
dev: true
/@types/node/18.11.10: /@types/node/18.11.10:
resolution: {integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==} resolution: {integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==}
dev: true dev: true
@ -568,10 +563,6 @@ packages:
engines: {node: '>=8'} engines: {node: '>=8'}
dev: true dev: true
/asynckit/0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: true
/ava/5.1.0: /ava/5.1.0:
resolution: {integrity: sha512-e5VFrSQ0WBPyZJWRXVrO7RFOizFeNM0t2PORwrPvWtApgkORI6cvGnY3GX1G+lzpd0HjqNx5Jus22AhxVnUMNA==} resolution: {integrity: sha512-e5VFrSQ0WBPyZJWRXVrO7RFOizFeNM0t2PORwrPvWtApgkORI6cvGnY3GX1G+lzpd0HjqNx5Jus22AhxVnUMNA==}
engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'} engines: {node: '>=14.19 <15 || >=16.15 <17 || >=18'}
@ -712,6 +703,14 @@ packages:
load-tsconfig: 0.2.3 load-tsconfig: 0.2.3
dev: true dev: true
/busboy/1.6.0:
resolution: {integrity: sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA==}
engines: {node: '>=10.16.0'}
dependencies:
streamsearch: 1.1.0
dev: false
optional: true
/cac/6.7.14: /cac/6.7.14:
resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==}
engines: {node: '>=8'} engines: {node: '>=8'}
@ -899,13 +898,6 @@ packages:
resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==} resolution: {integrity: sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==}
dev: true dev: true
/combined-stream/1.0.8:
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
engines: {node: '>= 0.8'}
dependencies:
delayed-stream: 1.0.0
dev: true
/commander/4.1.1: /commander/4.1.1:
resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==}
engines: {node: '>= 6'} engines: {node: '>= 6'}
@ -1053,11 +1045,6 @@ packages:
slash: 4.0.0 slash: 4.0.0
dev: true dev: true
/delayed-stream/1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dev: true
/dequal/2.0.3: /dequal/2.0.3:
resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
engines: {node: '>=6'} engines: {node: '>=6'}
@ -1491,15 +1478,6 @@ packages:
path-exists: 5.0.0 path-exists: 5.0.0
dev: true dev: true
/form-data/3.0.1:
resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==}
engines: {node: '>= 6'}
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
dev: true
/fs.realpath/1.0.0: /fs.realpath/1.0.0:
resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
dev: true dev: true
@ -2426,18 +2404,6 @@ packages:
picomatch: 2.3.1 picomatch: 2.3.1
dev: true dev: true
/mime-db/1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
engines: {node: '>= 0.6'}
dev: true
/mime-types/2.1.35:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
dependencies:
mime-db: 1.52.0
dev: true
/mimic-fn/2.1.0: /mimic-fn/2.1.0:
resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==}
engines: {node: '>=6'} engines: {node: '>=6'}
@ -2507,18 +2473,6 @@ packages:
resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==}
dev: true dev: true
/node-fetch/2.6.7:
resolution: {integrity: sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==}
engines: {node: 4.x || >=6.0.0}
peerDependencies:
encoding: ^0.1.0
peerDependenciesMeta:
encoding:
optional: true
dependencies:
whatwg-url: 5.0.0
dev: false
/node-releases/2.0.6: /node-releases/2.0.6:
resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==} resolution: {integrity: sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg==}
dev: true dev: true
@ -2699,6 +2653,11 @@ packages:
engines: {node: '>=12'} engines: {node: '>=12'}
dev: true dev: true
/p-timeout/6.0.0:
resolution: {integrity: sha512-5iS61MOdUMemWH9CORQRxVXTp9g5K8rPnI9uQpo97aWgsH3vVXKjkIhDi+OgIDmN3Ly9+AZ2fZV01Wut1yzfKA==}
engines: {node: '>=14.16'}
dev: false
/parse-json/4.0.0: /parse-json/4.0.0:
resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==}
engines: {node: '>=4'} engines: {node: '>=4'}
@ -3210,6 +3169,12 @@ packages:
escape-string-regexp: 2.0.0 escape-string-regexp: 2.0.0
dev: true dev: true
/streamsearch/1.1.0:
resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==}
engines: {node: '>=10.0.0'}
dev: false
optional: true
/string-argv/0.3.1: /string-argv/0.3.1:
resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==} resolution: {integrity: sha512-a1uQGz7IyVy9YwhqjZIZu1c8JO8dNIe20xBmSS6qu9kv++k3JGzCVmprbNN5Kn+BgzD5E7YYwg1CcjuJMRNsvg==}
engines: {node: '>=0.6.19'} engines: {node: '>=0.6.19'}
@ -3382,10 +3347,6 @@ packages:
is-number: 7.0.0 is-number: 7.0.0
dev: true dev: true
/tr46/0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
dev: false
/tr46/1.0.1: /tr46/1.0.1:
resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==} resolution: {integrity: sha512-dTpowEjclQ7Kgx5SdBkqRzVhERQXov8/l9Ft9dVM9fmg0W0KQSVaXX9T4i6twCPNtYiZM53lpSSUAwJbFPOHxA==}
dependencies: dependencies:
@ -3522,6 +3483,15 @@ packages:
which-boxed-primitive: 1.0.2 which-boxed-primitive: 1.0.2
dev: true dev: true
/undici/5.13.0:
resolution: {integrity: sha512-UDZKtwb2k7KRsK4SdXWG7ErXiL7yTGgLWvk2AXO1JMjgjh404nFo6tWSCM2xMpJwMPx3J8i/vfqEh1zOqvj82Q==}
engines: {node: '>=12.18'}
requiresBuild: true
dependencies:
busboy: 1.6.0
dev: false
optional: true
/unified/10.1.2: /unified/10.1.2:
resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==} resolution: {integrity: sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==}
dependencies: dependencies:
@ -3627,10 +3597,6 @@ packages:
defaults: 1.0.4 defaults: 1.0.4
dev: true dev: true
/webidl-conversions/3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
dev: false
/webidl-conversions/4.0.2: /webidl-conversions/4.0.2:
resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==} resolution: {integrity: sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==}
dev: true dev: true
@ -3640,13 +3606,6 @@ packages:
engines: {node: '>=6'} engines: {node: '>=6'}
dev: true dev: true
/whatwg-url/5.0.0:
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
dependencies:
tr46: 0.0.3
webidl-conversions: 3.0.1
dev: false
/whatwg-url/7.1.0: /whatwg-url/7.1.0:
resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==} resolution: {integrity: sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg==}
dependencies: dependencies:

Wyświetl plik

@ -12,9 +12,11 @@
- [Install](#install) - [Install](#install)
- [Usage](#usage) - [Usage](#usage)
- [Docs](#docs) - [Docs](#docs)
- [How it works](#how-it-works) - [Demos](#demos)
- [Examples](#examples) - [Session Tokens](#session-tokens)
- [Credit](#credit) - [Projects](#projects)
- [Compatibility](#compatibility)
- [Credits](#credits)
- [License](#license) - [License](#license)
## Intro ## Intro
@ -36,7 +38,9 @@ import { ChatGPTAPI } from 'chatgpt'
async function example() { async function example() {
// sessionToken is required; see below for details // sessionToken is required; see below for details
const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) const api = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
// ensure the API is properly authenticated // ensure the API is properly authenticated
await api.ensureAuth() await api.ensureAuth()
@ -51,7 +55,7 @@ async function example() {
} }
``` ```
By default, the response will be formatted as markdown. If you want to work with plaintext only, you can use: The default ChatGPT responses are formatted as markdown. If you want to work with plaintext only, you can use:
```ts ```ts
const api = new ChatGPTAPI({ const api = new ChatGPTAPI({
@ -60,6 +64,36 @@ const api = new ChatGPTAPI({
}) })
``` ```
If you want to automatically track the conversation, you can use `ChatGPTAPI.getConversation()`:
```ts
const api = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
const conversation = api.getConversation()
// send a message and wait for the response
const response0 = await conversation.sendMessage('What is OpenAI?')
// send a follow-up prompt to the previous message and wait for the response
const response1 = await conversation.sendMessage('Can you expand on that?')
// send another follow-up to the same conversation
const response2 = await conversation.sendMessage('Oh cool; thank you')
```
Sometimes, ChatGPT will hang for an extended period of time before sending it's response. This may be due to rate limiting or it may be due to OpenAI's servers being overloaded.
To mitigate this issues, you can add a timeout like this:
```ts
// timeout after 2 minutes (which will also abort the underlying HTTP request)
const response = await api.sendMessage('this is a timeout test', {
timeoutMs: 2 * 60 * 1000
})
```
<details> <details>
<summary>Usage in CommonJS (Dynamic import)</summary> <summary>Usage in CommonJS (Dynamic import)</summary>
@ -80,7 +114,13 @@ async function example() {
</details> </details>
A full [demo](./src/demo.ts) is included for testing purposes: ### Docs
See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters.
### Demos
A [basic demo](./src/demo.ts) is included for testing purposes:
```bash ```bash
# 1. clone repo # 1. clone repo
@ -90,11 +130,17 @@ A full [demo](./src/demo.ts) is included for testing purposes:
npx tsx src/demo.ts npx tsx src/demo.ts
``` ```
## Docs A [conversation demo](./src/demo-conversation.ts) is also included:
See the [auto-generated docs](./docs/classes/ChatGPTAPI.md) for more info on methods and parameters. ```bash
# 1. clone repo
# 2. install node deps
# 3. set `SESSION_TOKEN` in .env
# 4. run:
npx tsx src/demo-conversation.ts
```
## How it works ### Session Tokens
**This package requires a valid session token from ChatGPT to access it's unofficial REST API.** **This package requires a valid session token from ChatGPT to access it's unofficial REST API.**
@ -114,7 +160,7 @@ If you want to run the built-in demo, store this value as `SESSION_TOKEN` in a l
> **Note** > **Note**
> Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2). > Prior to v1.0.0, this package used a headless browser via [Playwright](https://playwright.dev/) to automate the web UI. Here are the [docs for the initial browser version](https://github.com/transitive-bullshit/chatgpt-api/tree/v0.4.2).
## Examples ## Projects
All of these awesome projects are built using the `chatgpt` package. 🤯 All of these awesome projects are built using the `chatgpt` package. 🤯
@ -135,7 +181,19 @@ All of these awesome projects are built using the `chatgpt` package. 🤯
If you create a cool integration, feel free to open a PR and add it to the list. If you create a cool integration, feel free to open a PR and add it to the list.
## Credit ## Compatibility
This package is ESM-only. It supports:
- Node.js >= 16.8
- If you need Node.js 14 support, use [`v1.4.0`](https://github.com/transitive-bullshit/chatgpt-api/releases/tag/v1.4.0)
- Edge runtimes like CF workers and Vercel edge functions
- Modern browsers
- Mainly chrome extensions where your code is protected to a degree
- **We do not recommend using `chatgpt` from client-side browser code** because it would expose your private session token
- If you want to build a website using `chatgpt`, we recommend using it only from your backend API
## Credits
- Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪 - Huge thanks to [@RomanHotsiy](https://github.com/RomanHotsiy), [@ElijahPepe](https://github.com/ElijahPepe), and all the other contributors 💪
- The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross) - The original browser version was inspired by this [Go module](https://github.com/danielgross/whatsapp-gpt) by [Daniel Gross](https://github.com/danielgross)
@ -145,4 +203,4 @@ If you create a cool integration, feel free to open a PR and add it to the list.
MIT © [Travis Fischer](https://transitivebullsh.it) MIT © [Travis Fischer](https://transitivebullsh.it)
If you found this project interesting, please consider supporting my open source work by [sponsoring me](https://github.com/sponsors/transitive-bullshit) or <a href="https://twitter.com/transitive_bs">following me on twitter <img src="https://storage.googleapis.com/saasify-assets/twitter-logo.svg" alt="twitter" height="24px" align="center"></a> If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or <a href="https://twitter.com/transitive_bs">following me on twitter <img src="https://storage.googleapis.com/saasify-assets/twitter-logo.svg" alt="twitter" height="24px" align="center"></a>

Wyświetl plik

@ -36,12 +36,14 @@ test('ChatGPTAPI valid session token', async (t) => {
await t.notThrowsAsync( await t.notThrowsAsync(
(async () => { (async () => {
const api = new ChatGPTAPI({ sessionToken: process.env.SESSION_TOKEN }) const chatgpt = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
// Don't make any real API calls using our session token if we're running on CI // Don't make any real API calls using our session token if we're running on CI
if (!isCI) { if (!isCI) {
await api.ensureAuth() await chatgpt.ensureAuth()
const response = await api.sendMessage('test') const response = await chatgpt.sendMessage('test')
console.log('chatgpt response', response) console.log('chatgpt response', response)
t.truthy(response) t.truthy(response)
@ -63,7 +65,50 @@ if (!isCI) {
}, },
{ {
message: message:
'ChatGPT failed to refresh auth token. Error: session token has expired' 'ChatGPT failed to refresh auth token. Error: session token may have expired'
}
)
})
}
if (!isCI) {
test('ChatGPTAPI timeout', async (t) => {
t.timeout(30 * 1000) // 30 seconds
await t.throwsAsync(
async () => {
const chatgpt = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
await chatgpt.sendMessage('test', {
timeoutMs: 1
})
},
{
message: 'ChatGPT timed out waiting for response'
}
)
})
test('ChatGPTAPI abort', async (t) => {
t.timeout(30 * 1000) // 30 seconds
await t.throwsAsync(
async () => {
const chatgpt = new ChatGPTAPI({
sessionToken: process.env.SESSION_TOKEN
})
const abortController = new AbortController()
setTimeout(() => abortController.abort(new Error('testing abort')), 10)
await chatgpt.sendMessage('test', {
abortSignal: abortController.signal
})
},
{
message: 'testing abort'
} }
) )
}) })

Wyświetl plik

@ -1,7 +1,9 @@
import ExpiryMap from 'expiry-map' import ExpiryMap from 'expiry-map'
import pTimeout, { TimeoutError } from 'p-timeout'
import { v4 as uuidv4 } from 'uuid' import { v4 as uuidv4 } from 'uuid'
import * as types from './types' import * as types from './types'
import { ChatGPTConversation } from './chatgpt-conversation'
import { fetch } from './fetch' import { fetch } from './fetch'
import { fetchSSE } from './fetch-sse' import { fetchSSE } from './fetch-sse'
import { markdownToText } from './utils' import { markdownToText } from './utils'
@ -10,79 +12,6 @@ const KEY_ACCESS_TOKEN = 'accessToken'
const USER_AGENT = const USER_AGENT =
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'
/**
* A conversation wrapper around the ChatGPT API. This allows you to send
* multiple messages to ChatGPT and receive responses, without having to
* manually pass the conversation ID and parent message ID for each message.
*/
class Conversation {
api: ChatGPTAPI
conversationId: string = undefined
parentMessageId: string = undefined
/**
* Creates a new conversation wrapper around the ChatGPT API.
* @param api - The ChatGPT API instance to use.
*/
constructor(
api: ChatGPTAPI,
opts: { conversationId?: string; parentMessageId?: string } = {}
) {
this.api = api
this.conversationId = opts.conversationId
this.parentMessageId = opts.parentMessageId
}
/**
* Sends a message to ChatGPT, waits for the response to resolve, and returns
* the response.
* If this is the first message in the conversation, the conversation ID and
* parent message ID will be automatically set.
* This allows you to send multiple messages to ChatGPT and receive responses,
* without having to manually pass the conversation ID and parent message ID
* for each message.
* If you want to manually pass the conversation ID and parent message ID,
* use `api.sendMessage` instead.
*
* @param message - The plaintext message to send.
* @param opts.onProgress - Optional listener which will be called every time the partial response is updated
* @param opts.onConversationResponse - Optional listener which will be called every time a conversation response is received
* @returns The plaintext response from ChatGPT.
*/
async sendMessage(
message: string,
opts: {
onProgress?: (partialResponse: string) => void
onConversationResponse?: (
response: types.ConversationResponseEvent
) => void
} = {}
) {
const { onProgress, onConversationResponse } = opts
if (!this.conversationId) {
return this.api.sendMessage(message, {
onProgress,
onConversationResponse: (response) => {
this.conversationId = response.conversation_id
this.parentMessageId = response.message.id
onConversationResponse?.(response)
}
})
}
return this.api.sendMessage(message, {
conversationId: this.conversationId,
parentMessageId: this.parentMessageId,
onProgress,
onConversationResponse: (response) => {
this.conversationId = response.conversation_id
this.parentMessageId = response.message.id
onConversationResponse?.(response)
}
})
}
}
export class ChatGPTAPI { export class ChatGPTAPI {
protected _sessionToken: string protected _sessionToken: string
protected _markdown: boolean protected _markdown: boolean
@ -90,8 +19,9 @@ export class ChatGPTAPI {
protected _backendApiBaseUrl: string protected _backendApiBaseUrl: string
protected _userAgent: string protected _userAgent: string
// stores access tokens for up to 10 seconds before needing to refresh // Stores access tokens for `accessTokenTTL` milliseconds before needing to refresh
protected _accessTokenCache = new ExpiryMap<string, string>(10 * 1000) // (defaults to 60 seconds)
protected _accessTokenCache: ExpiryMap<string, string>
/** /**
* Creates a new client wrapper around the unofficial ChatGPT REST API. * Creates a new client wrapper around the unofficial ChatGPT REST API.
@ -100,6 +30,7 @@ export class ChatGPTAPI {
* @param apiBaseUrl - Optional override; the base URL for ChatGPT webapp's API (`/api`) * @param apiBaseUrl - Optional override; the base URL for ChatGPT webapp's API (`/api`)
* @param backendApiBaseUrl - Optional override; the base URL for the ChatGPT backend API (`/backend-api`) * @param backendApiBaseUrl - Optional override; the base URL for the ChatGPT backend API (`/backend-api`)
* @param userAgent - Optional override; the `user-agent` header to use with ChatGPT requests * @param userAgent - Optional override; the `user-agent` header to use with ChatGPT requests
* @param accessTokenTTL - Optional override; how long in milliseconds access tokens should last before being forcefully refreshed
*/ */
constructor(opts: { constructor(opts: {
sessionToken: string sessionToken: string
@ -115,13 +46,17 @@ export class ChatGPTAPI {
/** @defaultValue `'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'` **/ /** @defaultValue `'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36'` **/
userAgent?: string userAgent?: string
/** @defaultValue 60000 (60 seconds) */
accessTokenTTL?: number
}) { }) {
const { const {
sessionToken, sessionToken,
markdown = true, markdown = true,
apiBaseUrl = 'https://chat.openai.com/api', apiBaseUrl = 'https://chat.openai.com/api',
backendApiBaseUrl = 'https://chat.openai.com/backend-api', backendApiBaseUrl = 'https://chat.openai.com/backend-api',
userAgent = USER_AGENT userAgent = USER_AGENT,
accessTokenTTL = 60000 // 60 seconds
} = opts } = opts
this._sessionToken = sessionToken this._sessionToken = sessionToken
@ -130,51 +65,52 @@ export class ChatGPTAPI {
this._backendApiBaseUrl = backendApiBaseUrl this._backendApiBaseUrl = backendApiBaseUrl
this._userAgent = userAgent this._userAgent = userAgent
this._accessTokenCache = new ExpiryMap<string, string>(accessTokenTTL)
if (!this._sessionToken) { if (!this._sessionToken) {
throw new Error('ChatGPT invalid session token') throw new Error('ChatGPT invalid session token')
} }
} }
async getIsAuthenticated() {
try {
void (await this.refreshAccessToken())
return true
} catch (err) {
return false
}
}
async ensureAuth() {
return await this.refreshAccessToken()
}
/** /**
* Sends a message to ChatGPT, waits for the response to resolve, and returns * Sends a message to ChatGPT, waits for the response to resolve, and returns
* the response. * the response.
* *
* @param message - The plaintext message to send. * If you want to receive a stream of partial responses, use `opts.onProgress`.
* @param opts.conversationId - Optional ID of the previous message in a conversation * If you want to receive the full response, including message and conversation IDs,
* @param opts.onProgress - Optional listener which will be called every time the partial response is updated * you can use `opts.onConversationResponse` or use the `ChatGPTAPI.getConversation`
* @param opts.onConversationResponse - Optional listener which will be called every time the partial response is updated with the full conversation response * helper.
*
* @param message - The prompt message to send
* @param opts.conversationId - Optional ID of a conversation to continue
* @param opts.parentMessageId - Optional ID of the previous message in the conversation
* @param opts.timeoutMs - Optional timeout in milliseconds (defaults to no timeout)
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
* @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
*
* @returns The response from ChatGPT
*/ */
async sendMessage( async sendMessage(
message: string, message: string,
opts: { opts: types.SendMessageOptions = {}
conversationId?: string
parentMessageId?: string
onProgress?: (partialResponse: string) => void
onConversationResponse?: (
response: types.ConversationResponseEvent
) => void
} = {}
): Promise<string> { ): Promise<string> {
const { const {
conversationId, conversationId,
parentMessageId = uuidv4(), parentMessageId = uuidv4(),
timeoutMs,
onProgress, onProgress,
onConversationResponse onConversationResponse
} = opts } = opts
let { abortSignal } = opts
let abortController: AbortController = null
if (timeoutMs && !abortSignal) {
abortController = new AbortController()
abortSignal = abortController.signal
}
const accessToken = await this.refreshAccessToken() const accessToken = await this.refreshAccessToken()
const body: types.ConversationJSONBody = { const body: types.ConversationJSONBody = {
@ -198,14 +134,9 @@ export class ChatGPTAPI {
} }
const url = `${this._backendApiBaseUrl}/conversation` const url = `${this._backendApiBaseUrl}/conversation`
// TODO: What's the best way to differentiate btwn wanting just the response text
// versus wanting the full response message, so you can extract the ID and other
// metadata?
// let fullResponse: types.Message = null
let response = '' let response = ''
return new Promise((resolve, reject) => { const responseP = new Promise<string>((resolve, reject) => {
fetchSSE(url, { fetchSSE(url, {
method: 'POST', method: 'POST',
headers: { headers: {
@ -214,6 +145,7 @@ export class ChatGPTAPI {
'user-agent': this._userAgent 'user-agent': this._userAgent
}, },
body: JSON.stringify(body), body: JSON.stringify(body),
signal: abortSignal,
onMessage: (data: string) => { onMessage: (data: string) => {
if (data === '[DONE]') { if (data === '[DONE]') {
return resolve(response) return resolve(response)
@ -224,6 +156,7 @@ export class ChatGPTAPI {
if (onConversationResponse) { if (onConversationResponse) {
onConversationResponse(parsedData) onConversationResponse(parsedData)
} }
const message = parsedData.message const message = parsedData.message
// console.log('event', JSON.stringify(parsedData, null, 2)) // console.log('event', JSON.stringify(parsedData, null, 2))
@ -236,7 +169,6 @@ export class ChatGPTAPI {
} }
response = text response = text
// fullResponse = message
if (onProgress) { if (onProgress) {
onProgress(text) onProgress(text)
@ -250,8 +182,56 @@ export class ChatGPTAPI {
} }
}).catch(reject) }).catch(reject)
}) })
if (timeoutMs) {
if (abortController) {
// This will be called when a timeout occurs in order for us to forcibly
// ensure that the underlying HTTP request is aborted.
;(responseP as any).cancel = () => {
abortController.abort()
}
} }
return pTimeout(responseP, {
milliseconds: timeoutMs,
message: 'ChatGPT timed out waiting for response'
})
} else {
return responseP
}
}
/**
* @returns `true` if the client has a valid acces token or `false` if refreshing
* the token fails.
*/
async getIsAuthenticated() {
try {
void (await this.refreshAccessToken())
return true
} catch (err) {
return false
}
}
/**
* Refreshes the client's access token which will succeed only if the session
* is still valid.
*/
async ensureAuth() {
return await this.refreshAccessToken()
}
/**
* Attempts to refresh the current access token using the ChatGPT
* `sessionToken` cookie.
*
* Access tokens will be cached for up to `accessTokenTTL` milliseconds to
* prevent refreshing access tokens too frequently.
*
* @returns A valid access token
* @throws An error if refreshing the access token fails.
*/
async refreshAccessToken(): Promise<string> { async refreshAccessToken(): Promise<string> {
const cachedAccessToken = this._accessTokenCache.get(KEY_ACCESS_TOKEN) const cachedAccessToken = this._accessTokenCache.get(KEY_ACCESS_TOKEN)
if (cachedAccessToken) { if (cachedAccessToken) {
@ -264,7 +244,13 @@ export class ChatGPTAPI {
cookie: `__Secure-next-auth.session-token=${this._sessionToken}`, cookie: `__Secure-next-auth.session-token=${this._sessionToken}`,
'user-agent': this._userAgent 'user-agent': this._userAgent
} }
}).then((r) => r.json() as any as types.SessionResult) }).then((r) => {
if (!r.ok) {
throw new Error(`${r.status} ${r.statusText}`)
}
return r.json() as any as types.SessionResult
})
const accessToken = res?.accessToken const accessToken = res?.accessToken
@ -275,7 +261,7 @@ export class ChatGPTAPI {
const error = res?.error const error = res?.error
if (error) { if (error) {
if (error === 'RefreshAccessTokenError') { if (error === 'RefreshAccessTokenError') {
throw new Error('session token has expired') throw new Error('session token may have expired')
} else { } else {
throw new Error(error) throw new Error(error)
} }
@ -289,15 +275,16 @@ export class ChatGPTAPI {
} }
/** /**
* Get a new Conversation instance, which can be used to send multiple messages as part of a single conversation. * Gets a new ChatGPTConversation instance, which can be used to send multiple
* messages as part of a single conversation.
* *
* @param opts.conversationId - Optional Data of the previous message in a conversation * @param opts.conversationId - Optional ID of the previous message in a conversation
* @param opts.parentMessageId - Optional Data of the previous message in a conversation * @param opts.parentMessageId - Optional ID of the previous message in a conversation
* @returns a new Conversation instance * @returns The new conversation instance
*/ */
getConversation( getConversation(
opts: { conversationId?: string; parentMessageId?: string } = {} opts: { conversationId?: string; parentMessageId?: string } = {}
) { ) {
return new Conversation(this, opts) return new ChatGPTConversation(this, opts)
} }
} }

Wyświetl plik

@ -0,0 +1,73 @@
import * as types from './types'
import { type ChatGPTAPI } from './chatgpt-api'
/**
* A conversation wrapper around the ChatGPTAPI. This allows you to send
* multiple messages to ChatGPT and receive responses, without having to
* manually pass the conversation ID and parent message ID for each message.
*/
export class ChatGPTConversation {
api: ChatGPTAPI
conversationId: string = undefined
parentMessageId: string = undefined
/**
* Creates a new conversation wrapper around the ChatGPT API.
*
* @param api - The ChatGPT API instance to use
* @param opts.conversationId - Optional ID of a conversation to continue
* @param opts.parentMessageId - Optional ID of the previous message in the conversation
*/
constructor(
api: ChatGPTAPI,
opts: { conversationId?: string; parentMessageId?: string } = {}
) {
this.api = api
this.conversationId = opts.conversationId
this.parentMessageId = opts.parentMessageId
}
/**
* Sends a message to ChatGPT, waits for the response to resolve, and returns
* the response.
*
* If this is the first message in the conversation, the conversation ID and
* parent message ID will be automatically set.
*
* This allows you to send multiple messages to ChatGPT and receive responses,
* without having to manually pass the conversation ID and parent message ID
* for each message.
*
* @param message - The prompt message to send
* @param opts.onProgress - Optional callback which will be invoked every time the partial response is updated
* @param opts.onConversationResponse - Optional callback which will be invoked every time the partial response is updated with the full conversation response
* @param opts.abortSignal - Optional callback used to abort the underlying `fetch` call using an [AbortController](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
*
* @returns The response from ChatGPT
*/
async sendMessage(
message: string,
opts: types.SendConversationMessageOptions = {}
): Promise<string> {
const { onConversationResponse, ...rest } = opts
return this.api.sendMessage(message, {
...rest,
conversationId: this.conversationId,
parentMessageId: this.parentMessageId,
onConversationResponse: (response) => {
if (response.conversation_id) {
this.conversationId = response.conversation_id
}
if (response.message?.id) {
this.parentMessageId = response.message.id
}
if (onConversationResponse) {
return onConversationResponse(response)
}
}
})
}
}

Wyświetl plik

@ -6,10 +6,10 @@ import { ChatGPTAPI } from '.'
dotenv.config() dotenv.config()
/** /**
* Example CLI for testing functionality. * Demo CLI for testing conversation support.
* *
* ``` * ```
* npx tsx src/demo.ts * npx tsx src/demo-conversation.ts
* ``` * ```
*/ */
async function main() { async function main() {

Wyświetl plik

@ -6,7 +6,7 @@ import { ChatGPTAPI } from '.'
dotenv.config() dotenv.config()
/** /**
* Example CLI for testing functionality. * Demo CLI for testing basic functionality.
* *
* ``` * ```
* npx tsx src/demo.ts * npx tsx src/demo.ts

Wyświetl plik

@ -1,31 +1,26 @@
import { createParser } from 'eventsource-parser' import { createParser } from 'eventsource-parser'
import { fetch } from './fetch' import { fetch } from './fetch'
import { streamAsyncIterable } from './stream-async-iterable'
// import { streamAsyncIterable } from './stream-async-iterable'
export async function fetchSSE( export async function fetchSSE(
url: string, url: string,
options: Parameters<typeof fetch>[1] & { onMessage: (data: string) => void } options: Parameters<typeof fetch>[1] & { onMessage: (data: string) => void }
) { ) {
const { onMessage, ...fetchOptions } = options const { onMessage, ...fetchOptions } = options
const resp = await fetch(url, fetchOptions) const res = await fetch(url, fetchOptions)
if (!res.ok) {
throw new Error(`ChatGPTAPI error ${res.status || res.statusText}`)
}
const parser = createParser((event) => { const parser = createParser((event) => {
if (event.type === 'event') { if (event.type === 'event') {
onMessage(event.data) onMessage(event.data)
} }
}) })
resp.body.on('readable', () => { for await (const chunk of streamAsyncIterable(res.body)) {
let chunk: string | Buffer const str = new TextDecoder().decode(chunk)
while (null !== (chunk = resp.body.read())) { parser.feed(str)
parser.feed(chunk.toString())
} }
})
// TODO: add support for web-compatible `fetch`
// for await (const chunk of streamAsyncIterable(resp.body)) {
// const str = new TextDecoder().decode(chunk)
// parser.feed(str)
// }
} }

Wyświetl plik

@ -1,3 +1,12 @@
import fetch from 'node-fetch' /// <reference lib="dom" />
// Use `undici` for node.js 16 and 17
// Use `fetch` for node.js >= 18
// Use `fetch` for all other environments, including browsers
// NOTE: The top-level await is removed in a `postbuild` npm script for the
// browser build
const fetch =
globalThis.fetch ??
((await import('undici')).fetch as unknown as typeof globalThis.fetch)
export { fetch } export { fetch }

Wyświetl plik

@ -1,3 +1,4 @@
export * from './chatgpt-api' export * from './chatgpt-api'
export * from './chatgpt-conversation'
export * from './types' export * from './types'
export * from './utils' export * from './utils'

Wyświetl plik

@ -1,6 +1,4 @@
import { type ReadableStream } from 'stream/web' export async function* streamAsyncIterable<T>(stream: ReadableStream<T>) {
export async function* streamAsyncIterable(stream: ReadableStream) {
const reader = stream.getReader() const reader = stream.getReader()
try { try {
while (true) { while (true) {

Wyświetl plik

@ -273,3 +273,17 @@ export type MessageContent = {
} }
export type MessageMetadata = any export type MessageMetadata = any
export type SendMessageOptions = {
conversationId?: string
parentMessageId?: string
timeoutMs?: number
onProgress?: (partialResponse: string) => void
onConversationResponse?: (response: ConversationResponseEvent) => void
abortSignal?: AbortSignal
}
export type SendConversationMessageOptions = Omit<
SendMessageOptions,
'conversationId' | 'parentMessageId'
>

Wyświetl plik

@ -1,14 +1,30 @@
import { defineConfig } from 'tsup' import { defineConfig } from 'tsup'
export default defineConfig({ export default defineConfig([
{
entry: ['src/index.ts'], entry: ['src/index.ts'],
outDir: 'build', outDir: 'build',
target: 'node14', target: 'node16',
platform: 'node', platform: 'node',
format: ['esm'], format: ['esm'],
splitting: false, splitting: false,
sourcemap: true, sourcemap: true,
minify: true, minify: false,
shims: false, shims: true,
dts: true dts: true,
}) external: ['undici']
},
{
entry: ['src/index.ts'],
outDir: 'build/browser',
target: 'chrome89',
platform: 'browser',
format: ['esm'],
splitting: false,
sourcemap: true,
minify: false,
shims: true,
dts: true,
external: ['undici']
}
])