From ceff46ac5222cc820be6e26b247d4e6cd9ae723f Mon Sep 17 00:00:00 2001
From: Travis Fischer
Date: Wed, 28 Jun 2023 20:17:58 -0700
Subject: [PATCH] =?UTF-8?q?=F0=9F=A4=94?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 legacy/test/tokenizer.test.ts | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/legacy/test/tokenizer.test.ts b/legacy/test/tokenizer.test.ts
index 804b242f..4dece9c4 100644
--- a/legacy/test/tokenizer.test.ts
+++ b/legacy/test/tokenizer.test.ts
@@ -7,14 +7,18 @@ import './_utils'
 const models = [
   'gpt-3.5-turbo',
   'gpt-4',
-  'gpt-4',
-  'gpt-4',
-  'gpt-4',
+  // the reason why we're including duplicates here is because we want to test
+  // the caching and idempotency of the tokenizer loading
   'gpt-4',
   'gpt-4',
   'gpt-4-0613',
   'text-davinci-003',
-  'code-davinci-002'
+  'code-davinci-002',
+  'gpt-4',
+  'gpt-4',
+  'gpt-4',
+  'gpt-4',
+  'gpt-4'
 ]

 for (let i = 0; i < models.length; ++i) {
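
For context, a minimal sketch of the kind of test loop that consumes this models list. The actual body of legacy/test/tokenizer.test.ts is not shown in the patch, so getTokenizerForModel, the Tokenizer shape, and the cache below are assumptions for illustration only; they show why duplicate model names exercise caching and idempotency of tokenizer loading.

// Hypothetical sketch, not part of the patch: duplicate model names should hit
// the cache and return the same tokenizer, and repeated encodes should agree.
import assert from 'node:assert'

type Tokenizer = { model: string; encode: (text: string) => number[] }

const tokenizerCache = new Map<string, Tokenizer>()

// stand-in for whatever loader the real test exercises
function getTokenizerForModel(model: string): Tokenizer {
  const cached = tokenizerCache.get(model)
  if (cached) return cached

  const tokenizer: Tokenizer = {
    model,
    // toy encoding: one "token" index per whitespace-separated word
    encode: (text) => text.split(/\s+/).filter(Boolean).map((_, i) => i)
  }
  tokenizerCache.set(model, tokenizer)
  return tokenizer
}

const models = ['gpt-3.5-turbo', 'gpt-4', 'gpt-4', 'gpt-4-0613']

for (const model of models) {
  const a = getTokenizerForModel(model)
  const b = getTokenizerForModel(model)

  // caching: repeated lookups for the same model return the same instance
  assert.strictEqual(a, b)

  // idempotency: encoding the same input twice yields the same tokens
  assert.deepStrictEqual(a.encode('hello world'), b.encode('hello world'))
}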