From 2bf3c7e1ed73d70370a98b55f9a7f9624a21d993 Mon Sep 17 00:00:00 2001
From: Travis Fischer
Date: Sun, 12 Feb 2023 22:53:40 -0600
Subject: [PATCH] =?UTF-8?q?=F0=9F=A5=81?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 legacy/readme.md          | 1 -
 legacy/src/chatgpt-api.ts | 6 ++++--
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/legacy/readme.md b/legacy/readme.md
index dad5482a..448313cb 100644
--- a/legacy/readme.md
+++ b/legacy/readme.md
@@ -104,7 +104,6 @@ console.log(res.text)
 You can add streaming via the `onProgress` handler:
 
 ```ts
-// timeout after 2 minutes (which will also abort the underlying HTTP request)
 const res = await api.sendMessage('Write a 500 word essay on frogs.', {
   // print the partial response as the AI is "typing"
   onProgress: (partialResponse) => console.log(partialResponse.text)
diff --git a/legacy/src/chatgpt-api.ts b/legacy/src/chatgpt-api.ts
index 2bbedeed..100069cb 100644
--- a/legacy/src/chatgpt-api.ts
+++ b/legacy/src/chatgpt-api.ts
@@ -10,10 +10,12 @@ import { fetchSSE } from './fetch-sse'
 
 // NOTE: this is not a public model, but it was leaked by the ChatGPT webapp.
 // const CHATGPT_MODEL = 'text-chat-davinci-002-20230126'
-const CHATGPT_MODEL = 'text-chat-davinci-002-20221122'
+// const CHATGPT_MODEL = 'text-chat-davinci-002-20221122'
+// const CHATGPT_MODEL = 'text-chat-davinci-002-sensitive-20230126'
+// const CHATGPT_MODEL = 'text-chat-davinci-002-sh-alpha-aoruigiofdj83'
 
 // Official model (costs money and is not fine-tuned for chat)
-// const CHATGPT_MODEL = 'text-davinci-003'
+const CHATGPT_MODEL = 'text-davinci-003'
 
 const USER_LABEL_DEFAULT = 'User'
 const ASSISTANT_LABEL_DEFAULT = 'ChatGPT'