diff --git a/examples/replicate.ts b/examples/replicate.ts
index 15f4d1b9..c5cf551c 100644
--- a/examples/replicate.ts
+++ b/examples/replicate.ts
@@ -5,19 +5,15 @@ import { z } from 'zod'
 import { Agentic, ReplicateStableDiffusionTool } from '@/index'
 
 async function main() {
-  const openai = new OpenAIClient({
-    apiKey: process.env.OPENAI_API_KEY!,
-    fetchOptions: {
-      timeout: false
-    }
-  })
+  const openai = new OpenAIClient({ apiKey: process.env.OPENAI_API_KEY! })
   const agentic = new Agentic({ openai })
 
   const topic = process.argv[2] || 'san francisco'
 
   const res = await agentic
     .gpt4(
-      `Generate {{numImages}} images of {{topic}}. Use prompts that are artistic and creative.`
+      ({ numImages, topic }) =>
+        `Generate ${numImages} images of ${topic}. Use prompts that are artistic and creative. The output should contain ${numImages} markdown images with descriptions.`
     )
     .modelParams({ temperature: 1.0 })
     .tools([new ReplicateStableDiffusionTool()])
diff --git a/package.json b/package.json
index d2c901a7..29b95118 100644
--- a/package.json
+++ b/package.json
@@ -33,7 +33,7 @@
     "prepare": "husky install",
     "pre-commit": "lint-staged",
     "test": "run-s test:*",
-    "test:unit": "ava",
+    "test:unit": "NODE_OPTIONS='--loader=tsx --no-warnings' ava",
     "test:prettier": "prettier \"**/*.{js,jsx,ts,tsx}\" --check",
     "test:eslint": "eslint \"**/*.ts\"",
     "test-cov": "c8 ava"
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 57159b09..35a3eb67 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -1,9 +1,5 @@
 lockfileVersion: '6.1'
 
-settings:
-  autoInstallPeers: true
-  excludeLinksFromLockfile: false
-
 dependencies:
   '@agentic/midjourney-fetch':
     specifier: ^1.0.1
@@ -4807,3 +4803,7 @@ packages:
   /zod@3.21.4:
     resolution: {integrity: sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==}
     dev: false
+
+settings:
+  autoInstallPeers: true
+  excludeLinksFromLockfile: false
diff --git a/src/llms/chat.ts b/src/llms/chat.ts
index 1c65d9a8..cc776c33 100644
--- a/src/llms/chat.ts
+++ b/src/llms/chat.ts
@@ -241,6 +241,7 @@ export abstract class BaseChatCompletion<
     // console.log('<<< completion', { messages, functions })
     const completion = await this._createChatCompletion(messages, functions)
     const message = completion.message
+    const functionCall = message.function_call
 
     // console.log('>>> completion', completion.message)
     this._logger.info(
@@ -249,9 +250,7 @@ export abstract class BaseChatCompletion<
     )
     ctx.metadata.completion = completion
 
-    if (message.function_call && !message.content) {
-      const functionCall = message.function_call
-
+    if (functionCall) {
       if (!isUsingTools) {
         // TODO: not sure what we should do in this case...
         output = functionCall
diff --git a/test/llms/openai-tools.test.ts b/test/llms/openai-tools.test.ts
index 8efc658d..0fa31705 100644
--- a/test/llms/openai-tools.test.ts
+++ b/test/llms/openai-tools.test.ts
@@ -50,6 +50,7 @@ test('OpenAIChatCompletion - tools - weather', async (t) => {
     )
     .call()
 
+  // console.log(JSON.stringify(result, null, 2))
   t.truthy(typeof result === 'object')
   t.truthy(typeof result.answer === 'number')
   t.truthy(typeof result.units === 'string')