From 7e31cb373066bf9c00720a448d99640cbacc2abf Mon Sep 17 00:00:00 2001
From: Travis Fischer
Date: Wed, 7 Jun 2023 12:09:00 -0700
Subject: [PATCH] feat: refactoring llms to subfolder

---
 legacy/src/agentic.ts              |  3 +--
 legacy/src/index.ts                |  4 +---
 legacy/src/{ => llms}/anthropic.ts |  5 +++--
 legacy/src/llms/index.ts           |  3 +++
 legacy/src/{ => llms}/llm.ts       | 13 ++++++++-----
 legacy/src/{ => llms}/openai.ts    |  5 +++--
 legacy/src/tokenizer.ts            |  4 ++--
 legacy/src/tools/metaphor.ts       |  8 ++++----
 legacy/test/_utils.ts              |  2 +-
 legacy/test/anthropic.test.ts      |  3 ++-
 legacy/test/openai.test.ts         |  3 ++-
 legacy/test/serpapi.test.ts        |  3 ++-
 legacy/tsconfig.json               |  5 ++++-
 13 files changed, 36 insertions(+), 25 deletions(-)
 rename legacy/src/{ => llms}/anthropic.ts (96%)
 create mode 100644 legacy/src/llms/index.ts
 rename legacy/src/{ => llms}/llm.ts (97%)
 rename legacy/src/{ => llms}/openai.ts (93%)

diff --git a/legacy/src/agentic.ts b/legacy/src/agentic.ts
index 3f23c7a3..389e265b 100644
--- a/legacy/src/agentic.ts
+++ b/legacy/src/agentic.ts
@@ -1,11 +1,10 @@
 import * as types from './types'
 import { defaultOpenAIModel } from './constants'
-// import { BaseTask } from './task'
 import {
   HumanFeedbackMechanism,
   HumanFeedbackMechanismCLI
 } from './human-feedback'
-import { OpenAIChatModel } from './openai'
+import { OpenAIChatModel } from './llms/openai'
 
 export class Agentic {
   // _taskMap: WeakMap>
diff --git a/legacy/src/index.ts b/legacy/src/index.ts
index 0bfce3e5..33b3a843 100644
--- a/legacy/src/index.ts
+++ b/legacy/src/index.ts
@@ -1,8 +1,6 @@
 export * from './agentic'
 export * from './task'
-export * from './llm'
-export * from './openai'
-export * from './anthropic'
+export * from './llms'
 export * from './tokenizer'
 export * from './human-feedback'
 
diff --git a/legacy/src/anthropic.ts b/legacy/src/llms/anthropic.ts
similarity index 96%
rename from legacy/src/anthropic.ts
rename to legacy/src/llms/anthropic.ts
index b1dd008d..87e8e673 100644
--- a/legacy/src/anthropic.ts
+++ b/legacy/src/llms/anthropic.ts
@@ -2,8 +2,9 @@ import * as anthropic from '@anthropic-ai/sdk'
 import { type SetOptional } from 'type-fest'
 import { ZodTypeAny, z } from 'zod'
 
-import * as types from './types'
-import { defaultAnthropicModel } from './constants'
+import * as types from '@/types'
+import { defaultAnthropicModel } from '@/constants'
+
 import { BaseChatModel } from './llm'
 
 const defaultStopSequences = [anthropic.HUMAN_PROMPT]
diff --git a/legacy/src/llms/index.ts b/legacy/src/llms/index.ts
new file mode 100644
index 00000000..e2425cb9
--- /dev/null
+++ b/legacy/src/llms/index.ts
@@ -0,0 +1,3 @@
+export * from './llm'
+export * from './openai'
+export * from './anthropic'
diff --git a/legacy/src/llm.ts b/legacy/src/llms/llm.ts
similarity index 97%
rename from legacy/src/llm.ts
rename to legacy/src/llms/llm.ts
index 09a2db19..40046122 100644
--- a/legacy/src/llm.ts
+++ b/legacy/src/llms/llm.ts
@@ -5,18 +5,18 @@ import { type SetRequired } from 'type-fest'
 import { ZodRawShape, ZodTypeAny, z } from 'zod'
 import { printNode, zodToTs } from 'zod-to-ts'
 
-import * as types from './types'
-import { BaseTask } from './task'
-import { getCompiledTemplate } from './template'
+import * as types from '@/types'
+import { BaseTask } from '@/task'
+import { getCompiledTemplate } from '@/template'
 import {
   Tokenizer,
   getModelNameForTiktoken,
   getTokenizerForModel
-} from './tokenizer'
+} from '@/tokenizer'
 import {
   extractJSONArrayFromString,
   extractJSONObjectFromString
-} from './utils'
+} from '@/utils'
 
 export abstract class BaseLLM<
   TInput extends ZodRawShape | ZodTypeAny = z.ZodVoid,
@@ -317,6 +317,8 @@ export abstract class BaseChatModel<
       tokensPerName = 1
     } else {
       // TODO
+      tokensPerMessage = 4
+      tokensPerName = -1
     }
 
     const numTokensPerMessage = await pMap(
@@ -342,6 +344,7 @@ export abstract class BaseChatModel<
       }
     )
 
+    // TODO
     numTokensTotal += 3 // every reply is primed with <|start|>assistant<|message|>
 
     return { numTokensTotal, numTokensPerMessage }
diff --git a/legacy/src/openai.ts b/legacy/src/llms/openai.ts
similarity index 93%
rename from legacy/src/openai.ts
rename to legacy/src/llms/openai.ts
index 9785341d..2d988040 100644
--- a/legacy/src/openai.ts
+++ b/legacy/src/llms/openai.ts
@@ -1,8 +1,9 @@
 import { type SetOptional } from 'type-fest'
 import { ZodTypeAny, z } from 'zod'
 
-import * as types from './types'
-import { defaultOpenAIModel } from './constants'
+import * as types from '@/types'
+import { defaultOpenAIModel } from '@/constants'
+
 import { BaseChatModel } from './llm'
 
 export class OpenAIChatModel<
diff --git a/legacy/src/tokenizer.ts b/legacy/src/tokenizer.ts
index 3c4408bd..2a02da09 100644
--- a/legacy/src/tokenizer.ts
+++ b/legacy/src/tokenizer.ts
@@ -68,8 +68,8 @@ export async function getTokenizerForEncoding(
   encoding: TiktokenEncoding,
   options?: {
     signal?: AbortSignal
-    extendedSpecialTokens?: Record<string, number>
     timeoutMs?: number
+    extendedSpecialTokens?: Record<string, number>
   }
 ) {
   const tiktokenBPE = await getTiktokenBPE(encoding, options)
@@ -81,8 +81,8 @@ export async function getTokenizerForModel(
   model: string,
   options?: {
     signal?: AbortSignal
-    extendedSpecialTokens?: Record<string, number>
     timeoutMs?: number
+    extendedSpecialTokens?: Record<string, number>
   }
 ) {
   const modelName = getModelNameForTiktoken(model)
diff --git a/legacy/src/tools/metaphor.ts b/legacy/src/tools/metaphor.ts
index 0680ab5f..45155f16 100644
--- a/legacy/src/tools/metaphor.ts
+++ b/legacy/src/tools/metaphor.ts
@@ -1,9 +1,9 @@
 import { z } from 'zod'
 
-import * as types from '../types'
-import { Agentic } from '../agentic'
-import { MetaphorClient } from '../services/metaphor'
-import { BaseTask } from '../task'
+import * as types from '@/types'
+import { Agentic } from '@/agentic'
+import { MetaphorClient } from '@/services/metaphor'
+import { BaseTask } from '@/task'
 
 export const MetaphorSearchToolInputSchema = z.object({
   query: z.string(),
diff --git a/legacy/test/_utils.ts b/legacy/test/_utils.ts
index 411f75d4..890d7293 100644
--- a/legacy/test/_utils.ts
+++ b/legacy/test/_utils.ts
@@ -7,7 +7,7 @@ import Keyv from 'keyv'
 import { OpenAIClient } from 'openai-fetch'
 import pMemoize from 'p-memoize'
 
-import { Agentic } from '../src'
+import { Agentic } from '@/agentic'
 
 export const fakeOpenAIAPIKey = 'fake-openai-api-key'
 export const fakeAnthropicAPIKey = 'fake-anthropic-api-key'
diff --git a/legacy/test/anthropic.test.ts b/legacy/test/anthropic.test.ts
index c7f19a11..00f4e2d4 100644
--- a/legacy/test/anthropic.test.ts
+++ b/legacy/test/anthropic.test.ts
@@ -1,7 +1,8 @@
 import test from 'ava'
 import { expectTypeOf } from 'expect-type'
 
-import { AnthropicChatModel } from '../src'
+import { AnthropicChatModel } from '@/llms/anthropic'
+
 import { createTestAgenticRuntime } from './_utils'
 
 test('AnthropicChatModel ⇒ string output', async (t) => {
diff --git a/legacy/test/openai.test.ts b/legacy/test/openai.test.ts
index 398f437a..7034b65b 100644
--- a/legacy/test/openai.test.ts
+++ b/legacy/test/openai.test.ts
@@ -2,7 +2,8 @@ import test from 'ava'
 import { expectTypeOf } from 'expect-type'
 import { z } from 'zod'
 
-import { OpenAIChatModel } from '../src'
+import { OpenAIChatModel } from '@/llms/openai'
+
 import { createTestAgenticRuntime } from './_utils'
 
 test('OpenAIChatModel ⇒ string output', async (t) => {
diff --git a/legacy/test/serpapi.test.ts b/legacy/test/serpapi.test.ts
index 2f5ddad9..5adf4856 100644
--- a/legacy/test/serpapi.test.ts
+++ b/legacy/test/serpapi.test.ts
@@ -1,6 +1,7 @@
 import test from 'ava'
 
-import { SerpAPIClient } from '../src/services/serpapi'
+import { SerpAPIClient } from '@/services/serpapi'
+
 import './_utils'
 
 test('SerpAPIClient.search', async (t) => {
diff --git a/legacy/tsconfig.json b/legacy/tsconfig.json
index 4b278dba..f18422ba 100644
--- a/legacy/tsconfig.json
+++ b/legacy/tsconfig.json
@@ -15,7 +15,10 @@
     "jsx": "preserve",
     "baseUrl": ".",
     "outDir": "build",
-    "noEmit": true
+    "noEmit": true,
+    "paths": {
+      "@/*": ["./src/*"]
+    }
  },
   "exclude": ["node_modules", "build"],
   "include": ["**/*.ts", "**/*.tsx"]
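
For reference, a minimal sketch (not part of the patch) of how the "@/*" alias added to legacy/tsconfig.json resolves; the import lines are taken from the hunks above, and the resolved file paths are assumptions based on "baseUrl": "." plus the "@/*": ["./src/*"] mapping:

    // In legacy/src/llms/anthropic.ts after this patch, "@/..." imports go
    // through the tsconfig "paths" mapping relative to baseUrl ("." = legacy/):
    import * as types from '@/types'                     // -> legacy/src/types.ts
    import { defaultAnthropicModel } from '@/constants'  // -> legacy/src/constants.ts

    // Imports between files inside the new llms/ folder stay relative:
    import { BaseChatModel } from './llm'                // -> legacy/src/llms/llm.ts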