From 7afbc0d259a2e86f3da289dbefb5cf90cb7bec54 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Tue, 13 Jun 2023 19:49:54 -0700 Subject: [PATCH] feat: add initial support for OpenAI functions w/ chat completion --- examples/basic.ts | 2 +- examples/facts.ts | 2 +- examples/functions.ts | 19 ++++++ examples/llm-with-search.ts | 2 +- examples/sentiment.ts | 2 +- package.json | 4 +- pnpm-lock.yaml | 37 ++++++++---- scratch/examples/calc-eval.ts | 2 +- scratch/examples/equation-producer.ts | 2 +- scratch/examples/food-expert.ts | 2 +- scratch/examples/human-feedback-select.ts | 2 +- scratch/examples/human-feedback.ts | 2 +- scratch/examples/json-summary.ts | 2 +- scratch/examples/misc.ts | 2 +- scratch/examples/tools.ts | 2 +- scratch/scratch-types.ts | 74 +++++++++++++++++++++++ src/llms/anthropic.ts | 9 +++ src/llms/chat.ts | 22 +++++-- src/llms/llm.ts | 6 +- src/llms/openai.ts | 13 +++- src/task.ts | 14 ++++- src/tokenizer.ts | 8 +++ src/tools/calculator.ts | 8 ++- src/tools/metaphor.ts | 4 +- src/tools/novu.ts | 4 +- src/types.ts | 19 ++---- test/_utils.ts | 2 +- test/services/openai.test.ts | 72 ++++++++++++++++++++++ test/tokenizer.test.ts | 1 + 29 files changed, 284 insertions(+), 56 deletions(-) create mode 100644 examples/functions.ts create mode 100644 scratch/scratch-types.ts create mode 100644 test/services/openai.test.ts diff --git a/examples/basic.ts b/examples/basic.ts index 399ee42..2bc42e1 100644 --- a/examples/basic.ts +++ b/examples/basic.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/examples/facts.ts b/examples/facts.ts index 0c308c0..9ae9b34 100644 --- a/examples/facts.ts +++ b/examples/facts.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/examples/functions.ts b/examples/functions.ts new file mode 100644 index 0000000..ee28cb7 --- /dev/null +++ b/examples/functions.ts @@ -0,0 +1,19 @@ +import { OpenAIClient } from '@agentic/openai-fetch' +import 'dotenv/config' +import { z } from 'zod' + +import { Agentic, CalculatorTool } from '@/index' + +async function main() { + const openai = new OpenAIClient({ apiKey: process.env.OPENAI_API_KEY! 
}) + const agentic = new Agentic({ openai }) + + const example = await agentic + .gpt4('What is 5 * 50?') + .tools([new CalculatorTool({ agentic })]) + .output(z.object({ answer: z.number() })) + .call() + console.log(example) +} + +main() diff --git a/examples/llm-with-search.ts b/examples/llm-with-search.ts index 6edf471..9509708 100644 --- a/examples/llm-with-search.ts +++ b/examples/llm-with-search.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic, MetaphorSearchTool } from '@/index' diff --git a/examples/sentiment.ts b/examples/sentiment.ts index de49ddb..74576a5 100644 --- a/examples/sentiment.ts +++ b/examples/sentiment.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/package.json b/package.json index ec26494..82fd944 100644 --- a/package.json +++ b/package.json @@ -38,11 +38,13 @@ "test:eslint": "eslint \"**/*.ts\"" }, "dependencies": { + "@agentic/openai-fetch": "^1.5.3", "@anthropic-ai/sdk": "^0.4.4", "@inquirer/checkbox": "^1.3.1", "@inquirer/editor": "^1.2.0", "@inquirer/input": "^1.2.1", "@inquirer/select": "^1.2.1", + "@types/json-schema": "^7.0.12", "debug": "^4.3.4", "expr-eval": "^2.0.2", "handlebars": "^4.7.7", @@ -52,7 +54,6 @@ "ky": "^0.33.3", "nanoid": "^4.0.2", "normalize-url": "^8.0.0", - "openai-fetch": "^1.5.1", "p-map": "^6.0.0", "p-retry": "^5.1.2", "p-timeout": "^6.1.2", @@ -60,6 +61,7 @@ "ts-dedent": "^2.2.0", "uuid": "^9.0.0", "zod": "^3.21.4", + "zod-to-json-schema": "^3.21.1", "zod-to-ts": "^1.1.4", "zod-validation-error": "^1.3.0" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 229db0d..c9cd7a3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,10 +1,13 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true excludeLinksFromLockfile: false dependencies: + '@agentic/openai-fetch': + specifier: ^1.5.3 + version: 1.5.3 '@anthropic-ai/sdk': specifier: ^0.4.4 version: 0.4.4 @@ -20,6 +23,9 @@ dependencies: '@inquirer/select': specifier: ^1.2.1 version: 1.2.1 + '@types/json-schema': + specifier: ^7.0.12 + version: 7.0.12 debug: specifier: ^4.3.4 version: 4.3.4 @@ -47,9 +53,6 @@ dependencies: normalize-url: specifier: ^8.0.0 version: 8.0.0 - openai-fetch: - specifier: ^1.5.1 - version: 1.5.1 p-map: specifier: ^6.0.0 version: 6.0.0 @@ -71,6 +74,9 @@ dependencies: zod: specifier: ^3.21.4 version: 3.21.4 + zod-to-json-schema: + specifier: ^3.21.1 + version: 3.21.1(zod@3.21.4) zod-to-ts: specifier: ^1.1.4 version: 1.1.4(typescript@5.1.3)(zod@3.21.4) @@ -163,6 +169,13 @@ devDependencies: packages: + /@agentic/openai-fetch@1.5.3: + resolution: {integrity: sha512-4c5YWz6jQdGxxM+SVhf0XW3mKYnFr56hntPep+y7wRfkjUl6lgZiuU3J61esQ8bj8vSFkgSfwjf3DeZIi/IEsg==} + dependencies: + ky: 0.33.3 + zod: 3.21.4 + dev: false + /@anthropic-ai/sdk@0.4.4: resolution: {integrity: sha512-Z/39nQi1sSUCeLII3lsAbL1u+0JF6cR2XmUEX9sLH0VtxmIjY6cjOUYjCkYh4oapTxOkhAFnVSAFJ6cxml2qXg==} dependencies: @@ -756,7 +769,6 @@ packages: /@types/json-schema@7.0.12: resolution: {integrity: sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==} - dev: true /@types/minimist@1.2.2: resolution: {integrity: sha512-jhuKLIRrhvCPLqwPcx6INqmKeiA5EWrsCOPhrlFSrbrmU4ZMPjj5Ul/oLCMDO98XRUIwVm78xICz4EPCektzeQ==} @@ -2989,13 +3001,6 @@ packages: mimic-fn: 4.0.0 
dev: true - /openai-fetch@1.5.1: - resolution: {integrity: sha512-LDSsXTFa2ssjYTZY51+B/69wXg8/UteqKyPtuFa+bMFRav7ACQXi3AJl+gieh3BF8La95NHCE0FS8t0F8fRHwA==} - dependencies: - ky: 0.33.3 - zod: 3.21.4 - dev: false - /optionator@0.9.1: resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} engines: {node: '>= 0.8.0'} @@ -4147,6 +4152,14 @@ packages: engines: {node: '>=12.20'} dev: true + /zod-to-json-schema@3.21.1(zod@3.21.4): + resolution: {integrity: sha512-y5g0MPxDq+YG/T+cHGPYH4PcBpyCqwK6wxeJ76MR563y0gk/14HKfebq8xHiItY7lkc9GDFygCnkvNDTvAhYAg==} + peerDependencies: + zod: ^3.21.4 + dependencies: + zod: 3.21.4 + dev: false + /zod-to-ts@1.1.4(typescript@5.1.3)(zod@3.21.4): resolution: {integrity: sha512-jsCg+pTNxLAdJOfW4ul+SpechdGYEJPPnssSbqWdR2LSIkotT22k+UvqPb1nEHwe/YbEcbUOlZUfGM0npgR+Jg==} peerDependencies: diff --git a/scratch/examples/calc-eval.ts b/scratch/examples/calc-eval.ts index 5a73012..dfdf676 100644 --- a/scratch/examples/calc-eval.ts +++ b/scratch/examples/calc-eval.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/scratch/examples/equation-producer.ts b/scratch/examples/equation-producer.ts index 4995a11..64499fb 100644 --- a/scratch/examples/equation-producer.ts +++ b/scratch/examples/equation-producer.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/scratch/examples/food-expert.ts b/scratch/examples/food-expert.ts index cc53b5a..bd67c8b 100644 --- a/scratch/examples/food-expert.ts +++ b/scratch/examples/food-expert.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/scratch/examples/human-feedback-select.ts b/scratch/examples/human-feedback-select.ts index 37e11a2..0e50ec0 100644 --- a/scratch/examples/human-feedback-select.ts +++ b/scratch/examples/human-feedback-select.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic, HumanFeedbackSelect } from '@/index' diff --git a/scratch/examples/human-feedback.ts b/scratch/examples/human-feedback.ts index 5285769..c62f2b5 100644 --- a/scratch/examples/human-feedback.ts +++ b/scratch/examples/human-feedback.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic, HumanFeedbackSingle } from '@/index' diff --git a/scratch/examples/json-summary.ts b/scratch/examples/json-summary.ts index de68bdc..b98a30f 100644 --- a/scratch/examples/json-summary.ts +++ b/scratch/examples/json-summary.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { OpenAIClient } from 'openai-fetch' import { z } from 'zod' import { Agentic } from '@/agentic' diff --git a/scratch/examples/misc.ts b/scratch/examples/misc.ts index e227b06..672f70f 100644 --- a/scratch/examples/misc.ts +++ b/scratch/examples/misc.ts @@ -1,5 +1,5 @@ +import { OpenAIClient } from '@agentic/openai-fetch' import 'dotenv/config' -import { 
OpenAIClient } from 'openai-fetch'
 import { z } from 'zod'
 
 import { Agentic } from '@/agentic'
diff --git a/scratch/examples/tools.ts b/scratch/examples/tools.ts
index e64716b..0a00c2c 100644
--- a/scratch/examples/tools.ts
+++ b/scratch/examples/tools.ts
@@ -1,5 +1,5 @@
+import { OpenAIClient } from '@agentic/openai-fetch'
 import 'dotenv/config'
-import { OpenAIClient } from 'openai-fetch'
 import { z } from 'zod'
 
 import { Agentic, MetaphorSearchTool } from '@/index'
diff --git a/scratch/scratch-types.ts b/scratch/scratch-types.ts
new file mode 100644
index 0000000..2f65178
--- /dev/null
+++ b/scratch/scratch-types.ts
@@ -0,0 +1,74 @@
+import { z } from 'zod'
+
+// export type ChatMessageRole = 'user' | 'system' | 'assistant'
+export const ChatMessageRoleSchema = z.union([
+  z.literal('user'),
+  z.literal('system'),
+  z.literal('assistant'),
+  z.literal('function')
+])
+export type ChatMessageRole = z.infer<typeof ChatMessageRoleSchema>
+
+export interface ChatMessageBase {
+  role: ChatMessageRole
+  content: string
+  name?: string
+}
+
+export interface ChatMessageUser extends ChatMessageBase {
+  role: 'user'
+}
+
+export interface ChatMessageSystem extends ChatMessageBase {
+  role: 'system'
+}
+
+export interface ChatMessageAssistant extends ChatMessageBase {
+  role: 'assistant'
+}
+
+export interface ChatMessageFunctionCall extends ChatMessageBase {
+  role: 'assistant'
+  function_call: FunctionCall
+}
+
+export interface FunctionCall {
+  name: string
+  arguments: string
+}
+
+export interface ChatMessageFunction extends ChatMessageBase {
+  role: 'function'
+  name: string
+}
+
+export type ChatMessage =
+  | ChatMessageUser
+  | ChatMessageSystem
+  | ChatMessageAssistant
+  | ChatMessageFunctionCall
+  | ChatMessageFunction
+
+export interface FunctionDefinition {
+  /**
+   * The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
+   */
+  name: string
+
+  /**
+   * The description of what the function does.
+   */
+  description?: string
+
+  /**
+   * The parameters the functions accepts, described as a JSON Schema object. See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format.
+   */
+  parameters?: { [key: string]: any }
+}
+
+export type FunctionCallOptions =
+  | 'none'
+  | 'auto'
+  | {
+      name: string
+    }
diff --git a/src/llms/anthropic.ts b/src/llms/anthropic.ts
index 4fe4375..f29b17e 100644
--- a/src/llms/anthropic.ts
+++ b/src/llms/anthropic.ts
@@ -47,6 +47,14 @@ export class AnthropicChatModel<
     }
   }
 
+  public override get nameForModel(): string {
+    return 'anthropic_chat'
+  }
+
+  public override get nameForHuman(): string {
+    return 'AnthropicChatModel'
+  }
+
   protected override async _createChatCompletion(
     messages: types.ChatMessage[]
   ): Promise> {
@@ -62,6 +70,7 @@
           return message.content
         }
       })
+      .filter(Boolean)
       .join('') + anthropic.AI_PROMPT
 
     // TODO: support streaming
diff --git a/src/llms/chat.ts b/src/llms/chat.ts
index 3583b0a..3032f2b 100644
--- a/src/llms/chat.ts
+++ b/src/llms/chat.ts
@@ -3,6 +3,7 @@ import pMap from 'p-map'
 import { dedent } from 'ts-dedent'
 import { type SetRequired } from 'type-fest'
 import { ZodType, z } from 'zod'
+import { zodToJsonSchema } from 'zod-to-json-schema'
 import { printNode, zodToTs } from 'zod-to-ts'
 
 import * as errors from '@/errors'
@@ -14,6 +15,7 @@ import {
   extractJSONObjectFromString
 } from '@/utils'
 
+import { BaseTask } from '../task'
 import { BaseLLM } from './llm'
 
 export abstract class BaseChatModel<
@@ -22,7 +24,8 @@
   TModelParams extends Record<string, any> = Record<string, any>,
   TChatCompletionResponse extends Record<string, any> = Record<string, any>
 > extends BaseLLM<TInput, TOutput, TModelParams> {
-  _messages: types.ChatMessage[]
+  protected _messages: types.ChatMessage[]
+  protected _tools?: BaseTask[]
 
   constructor(
     options: SetRequired<
@@ -33,6 +36,7 @@
     super(options)
 
     this._messages = options.messages
+    this._tools = options.tools
   }
 
   // TODO: use polymorphic `this` type to return correct BaseLLM subclass type
@@ -57,6 +61,11 @@
     return refinedInstance
   }
 
+  tools(tools: BaseTask[]): this {
+    this._tools = tools
+    return this
+  }
+
   protected abstract _createChatCompletion(
     messages: types.ChatMessage[]
   ): Promise<types.BaseChatCompletionResponse<TChatCompletionResponse>>
@@ -70,9 +79,6 @@
       input = this.inputSchema.parse(input)
     }
 
-    // TODO: validate input message variables against input schema
-    console.log({ input })
-
     const messages = this._messages
       .map((message) => {
         return {
@@ -263,9 +269,15 @@
     const numTokensPerMessage = await pMap(
       messages,
      async (message) => {
+        let content = message.content || ''
+        if (message.function_call) {
+          // TODO: this case needs testing
+          content = message.function_call.arguments
+        }
+
        const [numTokensContent, numTokensRole, numTokensName] =
          await Promise.all([
-            this.getNumTokens(message.content),
+            this.getNumTokens(content),
            this.getNumTokens(message.role),
            message.name
              ? this.getNumTokens(message.name).then((n) => n + tokensPerName)
diff --git a/src/llms/llm.ts b/src/llms/llm.ts
index ab8d746..932d7c9 100644
--- a/src/llms/llm.ts
+++ b/src/llms/llm.ts
@@ -69,7 +69,11 @@
     }
   }
 
-  public override get name(): string {
+  public override get nameForModel(): string {
+    return `${this._provider}_chat`
+  }
+
+  public override get nameForHuman(): string {
     return `${this._provider}:chat:${this._model}`
   }
 
diff --git a/src/llms/openai.ts b/src/llms/openai.ts
index 3956fef..9573546 100644
--- a/src/llms/openai.ts
+++ b/src/llms/openai.ts
@@ -38,16 +38,26 @@
     }
   }
 
+  public override get nameForModel(): string {
+    return 'openai_chat'
+  }
+
+  public override get nameForHuman(): string {
+    return 'OpenAIChatModel'
+  }
+
   protected override async _createChatCompletion(
     messages: types.ChatMessage[]
   ): Promise<
    types.BaseChatCompletionResponse
  > {
-    return this._client.createChatCompletion({
+    const res = await this._client.createChatCompletion({
       ...this._modelParams,
       model: this._model,
       messages
     })
+
+    return res
   }
 
   public override clone(): OpenAIChatModel<TInput, TOutput> {
@@ -61,6 +71,7 @@
       model: this._model,
       examples: this._examples,
       messages: this._messages,
+      tools: this._tools,
       ...this._modelParams
     })
   }
diff --git a/src/task.ts b/src/task.ts
index 8e08637..44ea49c 100644
--- a/src/task.ts
+++ b/src/task.ts
@@ -46,12 +46,20 @@
   public abstract get inputSchema(): ZodType<TInput>
   public abstract get outputSchema(): ZodType<TOutput>
 
-  public abstract get name(): string
+  public abstract get nameForModel(): string
+
+  public get nameForHuman(): string {
+    return this.nameForModel
+  }
+
+  public get descForModel(): string {
+    return ''
+  }
 
   // TODO: is this really necessary?
   public clone(): BaseTask<TInput, TOutput> {
     // TODO: override in subclass if needed
-    throw new Error(`clone not implemented for task "${this.name}"`)
+    throw new Error(`clone not implemented for task "${this.nameForModel}"`)
   }
 
   public retryConfig(retryConfig: types.RetryConfig): this {
@@ -81,7 +89,7 @@
       input,
       attemptNumber: 0,
       metadata: {
-        taskName: this.name,
+        taskName: this.nameForModel,
         taskId: this.id,
         callId: this._agentic.idGeneratorFn()
       }
diff --git a/src/tokenizer.ts b/src/tokenizer.ts
index 341cb43..117c94b 100644
--- a/src/tokenizer.ts
+++ b/src/tokenizer.ts
@@ -91,6 +91,11 @@
 }
 
 export function getModelNameForTiktoken(modelName: string): TiktokenModel {
+  if (modelName.startsWith('gpt-3.5-turbo-16k-')) {
+    // TODO: remove this once the model is added to tiktoken
+    return 'gpt-3.5-turbo-16k' as TiktokenModel
+  }
+
   if (modelName.startsWith('gpt-3.5-turbo-')) {
     return 'gpt-3.5-turbo'
   }
@@ -119,6 +124,9 @@
   const modelName = getModelNameForTiktoken(model)
 
   switch (modelName) {
+    case 'gpt-3.5-turbo-16k' as TiktokenModel:
+      return 16384
+
     case 'gpt-3.5-turbo':
       return 4096
 
diff --git a/src/tools/calculator.ts b/src/tools/calculator.ts
index 7dbe2c6..fe67b01 100644
--- a/src/tools/calculator.ts
+++ b/src/tools/calculator.ts
@@ -29,11 +29,15 @@
     return CalculatorOutputSchema
   }
 
-  public override get name(): string {
+  public override get nameForModel(): string {
     return 'calculator'
   }
 
-  public get descriptionForModel(): string {
+  public override get nameForHuman(): string {
+    return 'Calculator'
+  }
+
+  public override get descForModel(): string {
     return 'Useful for getting the result of a math expression. The input to this tool should be a valid mathematical expression that could be executed by a simple calculator.'
   }
 
diff --git a/src/tools/metaphor.ts b/src/tools/metaphor.ts
index 4f21d23..0dec282 100644
--- a/src/tools/metaphor.ts
+++ b/src/tools/metaphor.ts
@@ -33,8 +33,8 @@
     return metaphor.MetaphorSearchOutputSchema
   }
 
-  public override get name(): string {
-    return 'metaphor-search'
+  public override get nameForModel(): string {
+    return 'metaphor_web_search'
   }
 
   protected override async _call(
diff --git a/src/tools/novu.ts b/src/tools/novu.ts
index cd7aae5..5f218da 100644
--- a/src/tools/novu.ts
+++ b/src/tools/novu.ts
@@ -63,8 +63,8 @@
     return NovuNotificationToolOutputSchema
   }
 
-  public override get name(): string {
-    return 'novu'
+  public override get nameForModel(): string {
+    return 'novu_send_notification'
   }
 
   protected override async _call(
diff --git a/src/types.ts b/src/types.ts
index f866547..e2991eb 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1,10 +1,11 @@
+import * as openai from '@agentic/openai-fetch'
 import * as anthropic from '@anthropic-ai/sdk'
-import * as openai from 'openai-fetch'
 import type { Options as RetryOptions } from 'p-retry'
 import type { JsonObject } from 'type-fest'
 import { SafeParseReturnType, ZodType, ZodTypeAny, output, z } from 'zod'
 
 import type { Agentic } from './agentic'
+import type { BaseTask } from './task'
 
 export { openai }
 export { anthropic }
@@ -54,19 +55,8 @@
   promptSuffix?: string
 }
 
-// export type ChatMessageRole = 'user' | 'system' | 'assistant'
-export const ChatMessageRoleSchema = z.union([
-  z.literal('user'),
-  z.literal('system'),
-  z.literal('assistant')
-])
-export type ChatMessageRole = z.infer<typeof ChatMessageRoleSchema>
-
-export interface ChatMessage {
-  role: ChatMessageRole
-  content: string
-  name?: string
-}
+export type ChatMessage = openai.ChatMessage
+export type ChatMessageRole = openai.ChatMessageRole
 
 export interface ChatModelOptions<
   TInput = void,
   TOutput = string,
   TModelParams extends Record<string, any> = Record<string, any>
 > extends BaseLLMOptions<TInput, TOutput, TModelParams> {
   messages: ChatMessage[]
+  tools?: BaseTask[]
 }
 
 export interface BaseChatCompletionResponse<
diff --git a/test/_utils.ts b/test/_utils.ts
index 3383f8a..c7642b3 100644
--- a/test/_utils.ts
+++ b/test/_utils.ts
@@ -1,11 +1,11 @@
 import * as anthropic from '@anthropic-ai/sdk'
+import { OpenAIClient } from '@agentic/openai-fetch'
 import KeyvRedis from '@keyv/redis'
 import 'dotenv/config'
 import hashObject from 'hash-obj'
 import Redis from 'ioredis'
 import Keyv from 'keyv'
 import defaultKy from 'ky'
-import { OpenAIClient } from 'openai-fetch'
 import pMemoize from 'p-memoize'
 
 import { Agentic } from '@/agentic'
diff --git a/test/services/openai.test.ts b/test/services/openai.test.ts
new file mode 100644
index 0000000..9d84727
--- /dev/null
+++ b/test/services/openai.test.ts
@@ -0,0 +1,72 @@
+import test from 'ava'
+
+import * as types from '@/types'
+
+import { createOpenAITestClient } from '../_utils'
+
+test('OpenAIClient - createChatCompletion - functions', async (t) => {
+  const openai = createOpenAITestClient()
+
+  const model = 'gpt-3.5-turbo-0613'
+  const messages: types.ChatMessage[] = [
+    {
+      role: 'user',
+      content: 'What’s the weather like in Boston right now?'
+    }
+  ]
+  const functions = [
+    {
+      name: 'get_current_weather',
+      description: 'Get the current weather in a given location',
+      parameters: {
+        type: 'object',
+        properties: {
+          location: {
+            type: 'string',
+            description: 'The city and state, e.g. San Francisco, CA'
+          },
+          unit: {
+            type: 'string',
+            enum: ['celsius', 'fahrenheit']
+          }
+        },
+        required: ['location']
+      }
+    }
+  ]
+
+  const res0 = await openai.createChatCompletion({
+    model,
+    messages,
+    functions
+  })
+
+  // console.log(JSON.stringify(res0, null, 2))
+  t.is(res0.message.role, 'assistant')
+  t.is(res0.message.content as any, null)
+  t.is(res0.message.function_call!.name, 'get_current_weather')
+
+  const args = JSON.parse(res0.message.function_call!.arguments)
+  t.deepEqual(args, { location: 'Boston' })
+
+  const weatherMock = { temperature: 22, unit: 'celsius', description: 'Sunny' }
+
+  const res1 = await openai.createChatCompletion({
+    model,
+    messages: [
+      ...messages,
+      res0.message,
+      {
+        role: 'function',
+        name: 'get_current_weather',
+        content: JSON.stringify(weatherMock)
+      }
+    ],
+    functions
+  })
+
+  // console.log(JSON.stringify(res1, null, 2))
+  t.is(res1.message.role, 'assistant')
+  t.true(res1.message.content.length > 0)
+  t.is(res1.message.function_call, undefined)
+})
diff --git a/test/tokenizer.test.ts b/test/tokenizer.test.ts
index 9ac8339..d65f62d 100644
--- a/test/tokenizer.test.ts
+++ b/test/tokenizer.test.ts
@@ -7,6 +7,7 @@ import './_utils'
 const models = [
   'gpt-3.5-turbo',
   'gpt-4',
+  'gpt-4-0613',
   'text-davinci-003',
   'code-davinci-002'
 ]
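
Note: this patch adds zod-to-json-schema as a dependency, but the glue that turns a task's Zod input schema into the OpenAI `functions` payload exercised in test/services/openai.test.ts is not part of the diff yet. A minimal sketch of how that mapping could look, assuming the FunctionDefinition shape from scratch/scratch-types.ts and the task getters added in src/task.ts; the helper name and the calculator schema below are illustrative, not code from this patch:

import { z } from 'zod'
import { zodToJsonSchema } from 'zod-to-json-schema'

// Illustrative stand-in for a tool's input schema (not taken from this patch).
const calculatorInputSchema = z.object({
  expression: z.string().describe('A mathematical expression to evaluate')
})

// Mirrors the FunctionDefinition interface added in scratch/scratch-types.ts.
interface FunctionDefinition {
  name: string
  description?: string
  parameters?: { [key: string]: any }
}

// Hypothetical helper: map a task's nameForModel / descForModel / inputSchema
// onto the function definition shape passed to createChatCompletion.
function toFunctionDefinition(
  nameForModel: string,
  descForModel: string,
  inputSchema: z.ZodTypeAny
): FunctionDefinition {
  return {
    name: nameForModel,
    description: descForModel,
    parameters: zodToJsonSchema(inputSchema) as { [key: string]: any }
  }
}

console.log(
  toFunctionDefinition(
    'calculator',
    'Useful for getting the result of a math expression.',
    calculatorInputSchema
  )
)

The assumption is that each BaseTask already exposes nameForModel, descForModel, and a Zod inputSchema, which is exactly what the task.ts changes above introduce.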