Mirror of https://github.com/transitive-bullshit/chatgpt-api

chore: refactoring

Parent: 154929d8e9
Commit: 2e4d681ce3

@@ -34,7 +34,7 @@
     "pre-commit": "lint-staged",
     "test": "run-p test:*",
     "test:unit": "ava",
-    "test:prettier": "prettier **/*.{js,jsx,ts,tsx} --check",
+    "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check",
     "test:eslint": "eslint '**/*.ts'"
   },
   "dependencies": {

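Quoting the glob ('**/*.{js,jsx,ts,tsx}') keeps the shell from expanding the pattern itself, so Prettier resolves it with its own globbing and checks the same file set on every platform; it also matches the already-quoted test:eslint pattern.
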
@@ -1,6 +1,6 @@
 import * as types from './types'
 import { defaultOpenAIModel } from './constants'
-import { OpenAIChatModelBuilder } from './openai'
+import { OpenAIChatModel } from './openai'
 
 export class Agentic {
   _client: types.openai.OpenAIClient

@@ -58,7 +58,7 @@ export class Agentic {
       }
     }
 
-    return new OpenAIChatModelBuilder(this._client, {
+    return new OpenAIChatModel(this._client, {
       ...(this._defaults as any), // TODO
       ...options
     })

@@ -88,7 +88,7 @@ export class Agentic {
       }
     }
 
-    return new OpenAIChatModelBuilder(this._client, {
+    return new OpenAIChatModel(this._client, {
       ...(this._defaults as any), // TODO
       model: 'gpt-3.5-turbo',
       ...options

@@ -119,7 +119,7 @@ export class Agentic {
       }
     }
 
-    return new OpenAIChatModelBuilder(this._client, {
+    return new OpenAIChatModel(this._client, {
       ...(this._defaults as any), // TODO
       model: 'gpt-4',
       ...options

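The three hunks above make the same change inside Agentic: each factory method that used to hand back an OpenAIChatModelBuilder now constructs the renamed OpenAIChatModel, spreading the instance defaults first so that per-call options win. A minimal sketch of that pattern; the gpt4 method name and the client/option types are illustrative assumptions, not the repo's exact API:

// Simplified sketch of the factory pattern after the rename.
// `OpenAIChatModel`, the client type, and the option shape are stand-ins
// for the repo's real definitions; only the construction pattern is the point.
type ModelOptions = { model?: string; temperature?: number }

class OpenAIChatModel {
  constructor(
    readonly client: unknown,
    readonly options: ModelOptions
  ) {}
}

class Agentic {
  constructor(
    private readonly _client: unknown,
    private readonly _defaults: ModelOptions = {}
  ) {}

  // Hypothetical factory method mirroring the hunk at line 119:
  // defaults are spread first, then a model default, then caller options.
  gpt4(options: ModelOptions = {}) {
    return new OpenAIChatModel(this._client, {
      ...this._defaults,
      model: 'gpt-4',
      ...options
    })
  }
}
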
@@ -4,14 +4,14 @@ import { ZodTypeAny, z } from 'zod'
 
 import * as types from './types'
 import { defaultAnthropicModel } from './constants'
-import { BaseChatModelBuilder } from './llm'
+import { BaseChatModel } from './llm'
 
 const defaultStopSequences = [anthropic.HUMAN_PROMPT]
 
-export class AnthropicChatModelBuilder<
+export class AnthropicChatModel<
   TInput extends ZodTypeAny = ZodTypeAny,
   TOutput extends ZodTypeAny = z.ZodType<string>
-> extends BaseChatModelBuilder<
+> extends BaseChatModel<
   TInput,
   TOutput,
   SetOptional<

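For reference, anthropic.HUMAN_PROMPT is the "\n\nHuman:" turn prefix exported by the Anthropic SDK, so keeping it as the default stop sequence ends a completion as soon as the model starts writing a new human turn.
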
src/llm.ts (48 changed lines)

@@ -5,18 +5,18 @@ import { ZodRawShape, ZodTypeAny, z } from 'zod'
 import { printNode, zodToTs } from 'zod-to-ts'
 
 import * as types from './types'
-import { BaseTaskCallBuilder } from './task'
+import { BaseTask } from './task'
 import { getCompiledTemplate } from './template'
 import {
   extractJSONArrayFromString,
   extractJSONObjectFromString
 } from './utils'
 
-export abstract class BaseLLMCallBuilder<
+export abstract class BaseLLM<
   TInput extends ZodRawShape | ZodTypeAny = z.ZodVoid,
   TOutput extends ZodRawShape | ZodTypeAny = z.ZodType<string>,
   TModelParams extends Record<string, any> = Record<string, any>
-> extends BaseTaskCallBuilder<TInput, TOutput> {
+> extends BaseTask<TInput, TOutput> {
   protected _inputSchema: TInput | undefined
   protected _outputSchema: TOutput | undefined
 

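Taken together with the src/task.ts and src/openai.ts changes below, the renames describe a hierarchy of BaseTask → BaseLLM → BaseChatModel → provider classes (OpenAIChatModel, AnthropicChatModel). A type-level skeleton of that chain, trimmed to the generics and members visible in this diff:

// Skeleton of the renamed hierarchy; method bodies and most members elided.
// The member lists are illustrative, reduced to what the hunks show.
import { z, ZodRawShape, ZodTypeAny } from 'zod'

abstract class BaseTask<
  TInput extends ZodRawShape | ZodTypeAny = ZodTypeAny,
  TOutput extends ZodRawShape | ZodTypeAny = z.ZodTypeAny
> {
  public abstract get inputSchema(): TInput
  public abstract get outputSchema(): TOutput
}

abstract class BaseLLM<
  TInput extends ZodRawShape | ZodTypeAny = z.ZodVoid,
  TOutput extends ZodRawShape | ZodTypeAny = z.ZodType<string>,
  TModelParams extends Record<string, any> = Record<string, any>
> extends BaseTask<TInput, TOutput> {
  // TModelParams is threaded through to the concrete model classes.
  protected _inputSchema: TInput | undefined
  protected _outputSchema: TOutput | undefined
}

abstract class BaseChatModel<
  TInput extends ZodRawShape | ZodTypeAny = ZodTypeAny,
  TOutput extends ZodRawShape | ZodTypeAny = z.ZodType<string>,
  TModelParams extends Record<string, any> = Record<string, any>
> extends BaseLLM<TInput, TOutput, TModelParams> {
  // the real class also keeps _messages: types.ChatMessage[]
}
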
@@ -44,20 +44,18 @@ export abstract class BaseLLMCallBuilder<
 
   input<U extends ZodRawShape | ZodTypeAny = TInput>(
     inputSchema: U
-  ): BaseLLMCallBuilder<U, TOutput, TModelParams> {
-    ;(
-      this as unknown as BaseLLMCallBuilder<U, TOutput, TModelParams>
-    )._inputSchema = inputSchema
-    return this as unknown as BaseLLMCallBuilder<U, TOutput, TModelParams>
+  ): BaseLLM<U, TOutput, TModelParams> {
+    ;(this as unknown as BaseLLM<U, TOutput, TModelParams>)._inputSchema =
+      inputSchema
+    return this as unknown as BaseLLM<U, TOutput, TModelParams>
   }
 
   output<U extends ZodRawShape | ZodTypeAny = TOutput>(
     outputSchema: U
-  ): BaseLLMCallBuilder<TInput, U, TModelParams> {
-    ;(
-      this as unknown as BaseLLMCallBuilder<TInput, U, TModelParams>
-    )._outputSchema = outputSchema
-    return this as unknown as BaseLLMCallBuilder<TInput, U, TModelParams>
+  ): BaseLLM<TInput, U, TModelParams> {
+    ;(this as unknown as BaseLLM<TInput, U, TModelParams>)._outputSchema =
+      outputSchema
+    return this as unknown as BaseLLM<TInput, U, TModelParams>
   }
 
   public override get inputSchema(): TInput {

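The input() and output() setters are what make the fluent API type-safe: each mutates the instance and then returns it re-typed with the new schema parameter, and the `this as unknown as BaseLLM<…>` cast is how the changed generic gets past the compiler. A self-contained sketch of that pattern, reduced to a single MiniLLM stand-in class:

// Self-contained illustration of the "return this, re-typed" pattern used by
// BaseLLM.input()/.output(). The builder and schema handling are simplified.
import { z, ZodTypeAny } from 'zod'

class MiniLLM<TOutput extends ZodTypeAny = z.ZodType<string>> {
  protected _outputSchema: TOutput | undefined

  output<U extends ZodTypeAny>(outputSchema: U): MiniLLM<U> {
    // The instance is mutated in place; the cast only changes the static type.
    ;(this as unknown as MiniLLM<U>)._outputSchema = outputSchema
    return this as unknown as MiniLLM<U>
  }
}

// Before: MiniLLM<z.ZodType<string>>; after: MiniLLM<z.ZodArray<z.ZodString>>.
const base = new MiniLLM()
const typed = base.output(z.array(z.string()))
console.log(typed === (base as unknown)) // true – same instance, new static type
// Downstream code (e.g. a call() implementation) can now infer string[] as the
// result type from `typed`'s schema parameter.
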
@@ -96,12 +94,12 @@ export abstract class BaseLLMCallBuilder<
   // }): Promise<TOutput>
 }
 
-export abstract class BaseChatModelBuilder<
+export abstract class BaseChatModel<
   TInput extends ZodRawShape | ZodTypeAny = ZodTypeAny,
   TOutput extends ZodRawShape | ZodTypeAny = z.ZodType<string>,
   TModelParams extends Record<string, any> = Record<string, any>,
   TChatCompletionResponse extends Record<string, any> = Record<string, any>
-> extends BaseLLMCallBuilder<TInput, TOutput, TModelParams> {
+> extends BaseLLM<TInput, TOutput, TModelParams> {
   _messages: types.ChatMessage[]
 
   constructor(

@@ -208,21 +206,13 @@ export abstract class BaseChatModelBuilder<
         : z.object(this._outputSchema)
 
     if (outputSchema instanceof z.ZodArray) {
-      try {
-        const trimmedOutput = extractJSONArrayFromString(output)
-        output = JSON.parse(jsonrepair(trimmedOutput ?? output))
-      } catch (err) {
-        // TODO
-        throw err
-      }
+      // TODO: gracefully handle parse errors
+      const trimmedOutput = extractJSONArrayFromString(output)
+      output = JSON.parse(jsonrepair(trimmedOutput ?? output))
     } else if (outputSchema instanceof z.ZodObject) {
-      try {
-        const trimmedOutput = extractJSONObjectFromString(output)
-        output = JSON.parse(jsonrepair(trimmedOutput ?? output))
-      } catch (err) {
-        // TODO
-        throw err
-      }
+      // TODO: gracefully handle parse errors
+      const trimmedOutput = extractJSONObjectFromString(output)
+      output = JSON.parse(jsonrepair(trimmedOutput ?? output))
     } else if (outputSchema instanceof z.ZodBoolean) {
       output = output.toLowerCase().trim()
       const booleanOutputs = {

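The simplified branches drop a try/catch that only rethrew, keeping the core recipe: cut the first JSON-looking array or object out of the raw model text, run it through jsonrepair to fix unquoted keys, single quotes, and trailing commas, then JSON.parse it ahead of zod validation. A rough sketch of that recipe for the object case; the extractor below is a naive stand-in for the repo's helper in ./utils:

// Sketch of the parse path for object-shaped outputs. The extractor here is a
// deliberately naive stand-in (first '{' to last '}'), not the repo's helper.
import { jsonrepair } from 'jsonrepair'
import { z } from 'zod'

function extractJSONObjectFromString(text: string): string | undefined {
  const start = text.indexOf('{')
  const end = text.lastIndexOf('}')
  return start >= 0 && end > start ? text.slice(start, end + 1) : undefined
}

const outputSchema = z.object({ name: z.string(), age: z.number() })

// Typical LLM output: prose wrapped around slightly malformed JSON.
const raw = "Sure! Here you go: { name: 'Leeroy', age: 42, }"

const trimmed = extractJSONObjectFromString(raw)
// jsonrepair fixes the unquoted key, single quotes and trailing comma.
const parsed = JSON.parse(jsonrepair(trimmed ?? raw))
const result = outputSchema.parse(parsed)

console.log(result) // { name: 'Leeroy', age: 42 }
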
@@ -3,12 +3,12 @@ import { ZodTypeAny, z } from 'zod'
 
 import * as types from './types'
 import { defaultOpenAIModel } from './constants'
-import { BaseChatModelBuilder } from './llm'
+import { BaseChatModel } from './llm'
 
-export class OpenAIChatModelBuilder<
+export class OpenAIChatModel<
   TInput extends ZodTypeAny = ZodTypeAny,
   TOutput extends ZodTypeAny = z.ZodType<string>
-> extends BaseChatModelBuilder<
+> extends BaseChatModel<
   TInput,
   TOutput,
   SetOptional<Omit<types.openai.ChatCompletionParams, 'messages'>, 'model'>,

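The model-params slot reads as: everything OpenAI's chat-completion call accepts except messages (which the chat model manages itself), with model made optional because a default is filled in. SetOptional comes from type-fest; a tiny illustration with a made-up params type:

// Illustration of the SetOptional<Omit<...>> pattern with a stand-in type;
// ChatCompletionParamsLike is hypothetical, not the repo's real type.
import type { SetOptional } from 'type-fest'

interface ChatCompletionParamsLike {
  model: string
  messages: { role: string; content: string }[]
  temperature?: number
}

type ModelParams = SetOptional<Omit<ChatCompletionParamsLike, 'messages'>, 'model'>

// `messages` is gone and `model` is now optional:
const params: ModelParams = { temperature: 0 }
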
src/task.ts (12 changed lines)

@@ -6,12 +6,12 @@ import * as types from './types'
  * A `Task` is a typed, async function call that may be non-deterministic.
  *
  * Examples of tasks include:
- * - LLM calls with structured input and output
+ * - LLM calls
  * - API calls
  * - Native function calls
  * - Invoking sub-agents
  */
-export abstract class BaseTaskCallBuilder<
+export abstract class BaseTask<
   TInput extends ZodRawShape | ZodTypeAny = ZodTypeAny,
   TOutput extends ZodRawShape | ZodTypeAny = z.ZodTypeAny
 > {

@@ -24,9 +24,15 @@ export abstract class BaseTaskCallBuilder<
   }
 
   public abstract get inputSchema(): TInput
 
   public abstract get outputSchema(): TOutput
 
+  // TODO
+  // public abstract get nameForModel(): string
+  // public abstract get nameForHuman(): string
+
+  // public abstract get descForModel(): string
+  // public abstract get descForHuman(): string
 
   public retryConfig(retryConfig: types.RetryConfig) {
     this._retryConfig = retryConfig
     return this

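For context on the contract BaseTask (formerly BaseTaskCallBuilder) imposes, here is a rough, self-contained sketch of a concrete task that supplies the two abstract schema getters and chains the retryConfig() setter. The stub base class and the RetryConfig shape are simplified stand-ins for the repo's real definitions:

// Minimal stand-in for BaseTask plus a concrete subclass, for illustration only.
import { z, ZodTypeAny } from 'zod'

interface RetryConfig {
  retries: number // hypothetical shape; the repo defines types.RetryConfig
}

abstract class BaseTaskSketch<TInput extends ZodTypeAny, TOutput extends ZodTypeAny> {
  protected _retryConfig: RetryConfig | undefined

  public abstract get inputSchema(): TInput
  public abstract get outputSchema(): TOutput

  // Builder-style setter: returns `this` so configuration can be chained.
  public retryConfig(retryConfig: RetryConfig) {
    this._retryConfig = retryConfig
    return this
  }
}

const EchoInput = z.object({ text: z.string() })
const EchoOutput = z.string()

class EchoTask extends BaseTaskSketch<typeof EchoInput, typeof EchoOutput> {
  public get inputSchema() {
    return EchoInput
  }

  public get outputSchema() {
    return EchoOutput
  }
}

// Usage: schemas are exposed as typed getters; retryConfig() chains.
const task = new EchoTask().retryConfig({ retries: 3 })
console.log(task.inputSchema.shape.text) // ZodString
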
@@ -1,7 +1,7 @@
 import { z } from 'zod'
 
 import { MetaphorClient } from '../services/metaphor'
-import { BaseTaskCallBuilder } from '../task'
+import { BaseTask } from '../task'
 
 export const MetaphorSearchToolInputSchema = z.object({
   query: z.string(),

@@ -28,7 +28,7 @@ export type MetaphorSearchToolOutput = z.infer<
   typeof MetaphorSearchToolOutputSchema
 >
 
-export class MetaphorSearchTool extends BaseTaskCallBuilder<
+export class MetaphorSearchTool extends BaseTask<
   typeof MetaphorSearchToolInputSchema,
   typeof MetaphorSearchToolOutputSchema
 > {

@@ -1,15 +1,14 @@
 import test from 'ava'
 import { expectTypeOf } from 'expect-type'
 import { z } from 'zod'
 
-import { AnthropicChatModelBuilder } from '../src/anthropic'
+import { AnthropicChatModel } from '../src/anthropic'
 import { createAnthropicTestClient } from './_utils'
 
 test('AnthropicChatModel ⇒ string output', async (t) => {
   t.timeout(2 * 60 * 1000)
   const client = createAnthropicTestClient()
 
-  const builder = new AnthropicChatModelBuilder(client, {
+  const builder = new AnthropicChatModel(client, {
     modelParams: {
       temperature: 0,
       max_tokens_to_sample: 30

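Note the provider-specific parameter that survives the rename: the Anthropic test caps generation with max_tokens_to_sample (the field name used by Anthropic's completions API), while the OpenAI tests below use max_tokens.
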
@@ -2,14 +2,14 @@ import test from 'ava'
 import { expectTypeOf } from 'expect-type'
 import { z } from 'zod'
 
-import { OpenAIChatModelBuilder } from '../src/openai'
+import { OpenAIChatModel } from '../src/openai'
 import { createOpenAITestClient } from './_utils'
 
 test('OpenAIChatModel ⇒ string output', async (t) => {
   t.timeout(2 * 60 * 1000)
   const client = createOpenAITestClient()
 
-  const builder = new OpenAIChatModelBuilder(client, {
+  const builder = new OpenAIChatModel(client, {
     modelParams: {
       temperature: 0,
       max_tokens: 30

@@ -42,7 +42,7 @@ test('OpenAIChatModel ⇒ json output', async (t) => {
   t.timeout(2 * 60 * 1000)
   const client = createOpenAITestClient()
 
-  const builder = new OpenAIChatModelBuilder(client, {
+  const builder = new OpenAIChatModel(client, {
     modelParams: {
       temperature: 0.5
     },

@@ -67,7 +67,7 @@ test('OpenAIChatModel ⇒ boolean output', async (t) => {
   t.timeout(2 * 60 * 1000)
   const client = createOpenAITestClient()
 
-  const builder = new OpenAIChatModelBuilder(client, {
+  const builder = new OpenAIChatModel(client, {
     modelParams: {
       temperature: 0,
       max_tokens: 30