From 28d8ca2430d96a88fc5db779803944365e94de56 Mon Sep 17 00:00:00 2001 From: Travis Fischer Date: Sun, 4 Aug 2024 05:55:47 -0500 Subject: [PATCH] =?UTF-8?q?=F0=9F=92=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- examples/ai-sdk/bin/browserbase.ts | 2 +- examples/ai-sdk/bin/weather.ts | 2 +- examples/dexter/bin/analyze.ts | 2 +- examples/dexter/bin/code-interpreter.ts | 2 +- examples/dexter/bin/election-news-chain.ts | 2 +- examples/dexter/bin/election-news.ts | 2 +- examples/dexter/bin/weather.ts | 2 +- examples/langchain/bin/weather.ts | 2 +- examples/llamaindex/bin/weather.ts | 2 +- examples/openai/bin/weather.ts | 4 +-- readme.md | 35 +++++++++++++++------- 11 files changed, 35 insertions(+), 22 deletions(-) diff --git a/examples/ai-sdk/bin/browserbase.ts b/examples/ai-sdk/bin/browserbase.ts index e3faba4..b5ea3fa 100644 --- a/examples/ai-sdk/bin/browserbase.ts +++ b/examples/ai-sdk/bin/browserbase.ts @@ -12,7 +12,7 @@ async function main() { console.log(browserTool.parameters) const result = await generateText({ - model: openai('gpt-4o'), + model: openai('gpt-4o-mini'), tools: { browserTool }, toolChoice: 'required', temperature: 0, diff --git a/examples/ai-sdk/bin/weather.ts b/examples/ai-sdk/bin/weather.ts index 1ef659c..6ba6b6d 100644 --- a/examples/ai-sdk/bin/weather.ts +++ b/examples/ai-sdk/bin/weather.ts @@ -10,7 +10,7 @@ async function main() { const weather = new WeatherClient() const result = await generateText({ - model: openai('gpt-4o'), + model: openai('gpt-4o-mini'), tools: createAISDKTools(weather), toolChoice: 'required', temperature: 0, diff --git a/examples/dexter/bin/analyze.ts b/examples/dexter/bin/analyze.ts index 5b936e0..ba82328 100644 --- a/examples/dexter/bin/analyze.ts +++ b/examples/dexter/bin/analyze.ts @@ -13,7 +13,7 @@ async function main() { const runner = createAIRunner({ chatModel: new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 } + params: { model: 'gpt-4o-mini', temperature: 0 } // debug: true }), functions: createDexterFunctions(searchAndCrawl), diff --git a/examples/dexter/bin/code-interpreter.ts b/examples/dexter/bin/code-interpreter.ts index 79c0e18..c7f70fd 100644 --- a/examples/dexter/bin/code-interpreter.ts +++ b/examples/dexter/bin/code-interpreter.ts @@ -8,7 +8,7 @@ import { ChatModel, createAIRunner } from '@dexaai/dexter' async function main() { const runner = createAIRunner({ chatModel: new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 }, + params: { model: 'gpt-4o-mini', temperature: 0 }, debug: true }), functions: createDexterFunctions(e2b) diff --git a/examples/dexter/bin/election-news-chain.ts b/examples/dexter/bin/election-news-chain.ts index bcd467f..beee439 100644 --- a/examples/dexter/bin/election-news-chain.ts +++ b/examples/dexter/bin/election-news-chain.ts @@ -11,7 +11,7 @@ async function main() { const serper = new SerperClient() const chatModel = new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 }, + params: { model: 'gpt-4o-mini', temperature: 0 }, debug: true }) diff --git a/examples/dexter/bin/election-news.ts b/examples/dexter/bin/election-news.ts index d37121e..74f0868 100644 --- a/examples/dexter/bin/election-news.ts +++ b/examples/dexter/bin/election-news.ts @@ -12,7 +12,7 @@ async function main() { const runner = createAIRunner({ chatModel: new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 } + params: { model: 'gpt-4o-mini', temperature: 0 } // debug: true }), functions: createDexterFunctions( diff --git 
a/examples/dexter/bin/weather.ts b/examples/dexter/bin/weather.ts index 05309c9..fb8dd3f 100644 --- a/examples/dexter/bin/weather.ts +++ b/examples/dexter/bin/weather.ts @@ -10,7 +10,7 @@ async function main() { const runner = createAIRunner({ chatModel: new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 } + params: { model: 'gpt-4o-mini', temperature: 0 } // debug: true }), functions: createDexterFunctions(weather), diff --git a/examples/langchain/bin/weather.ts b/examples/langchain/bin/weather.ts index a8174c5..5942c57 100644 --- a/examples/langchain/bin/weather.ts +++ b/examples/langchain/bin/weather.ts @@ -12,7 +12,7 @@ async function main() { const tools = createLangChainTools(weather) const agent = createToolCallingAgent({ - llm: new ChatOpenAI({ model: 'gpt-4o', temperature: 0 }), + llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }), tools, prompt: ChatPromptTemplate.fromMessages([ ['system', 'You are a helpful assistant. Be as concise as possible.'], diff --git a/examples/llamaindex/bin/weather.ts b/examples/llamaindex/bin/weather.ts index 8cb7325..bf39545 100644 --- a/examples/llamaindex/bin/weather.ts +++ b/examples/llamaindex/bin/weather.ts @@ -10,7 +10,7 @@ async function main() { const tools = createLlamaIndexTools(weather) const agent = new OpenAIAgent({ - llm: new OpenAI({ model: 'gpt-4o', temperature: 0 }), + llm: new OpenAI({ model: 'gpt-4o-mini', temperature: 0 }), systemPrompt: 'You are a helpful assistant. Be as concise as possible.', tools }) diff --git a/examples/openai/bin/weather.ts b/examples/openai/bin/weather.ts index 4ee64d4..60124e8 100644 --- a/examples/openai/bin/weather.ts +++ b/examples/openai/bin/weather.ts @@ -21,7 +21,7 @@ async function main() { // First call to OpenAI to invoke the weather tool const res = await openai.chat.completions.create({ messages, - model: 'gpt-4o', + model: 'gpt-4o-mini', temperature: 0, tools: weather.functions.toolSpecs, tool_choice: 'required' @@ -48,7 +48,7 @@ async function main() { // Second call to OpenAI to generate a text response const res = await openai.chat.completions.create({ messages, - model: 'gpt-4o', + model: 'gpt-4o-mini', temperature: 0, tools: weather.functions.toolSpecs }) diff --git a/readme.md b/readme.md index fa939b7..05a3f1a 100644 --- a/readme.md +++ b/readme.md @@ -33,14 +33,15 @@ ## Intro -The goal of this project is to create a **set of standard AI functions / tools** which are **optimized for both normal TS-usage as well as LLM-based apps** and that work with all of the major AI SDKs (LangChain, LlamaIndex, Vercel AI SDK, OpenAI SDK, etc). +The goal of this project is to create a **set of standard AI functions / tools** which are **optimized for both normal TS-usage as well as LLM-based apps** and **work with all of the major TS AI SDKs** (LangChain, LlamaIndex, Vercel AI SDK, OpenAI SDK, etc). 
-For example, stdlib clients like `WeatherClient` can be used as normal TS classes: +Agentic clients like `WeatherClient` can be used as normal TS classes: ```ts import { WeatherClient } from '@agentic/stdlib' -const weather = new WeatherClient() // (requires `WEATHER_API_KEY` env var) +// Requires `process.env.WEATHER_API_KEY` (from weatherapi.com) +const weather = new WeatherClient() const result = await weather.getCurrentWeather({ q: 'San Francisco' @@ -64,7 +65,7 @@ import { WeatherClient } from '@agentic/stdlib' const weather = new WeatherClient() const result = await generateText({ - model: openai('gpt-4o'), + model: openai('gpt-4o-mini'), // this is the key line which uses the `@agentic/ai-sdk` adapter tools: createAISDKTools(weather), toolChoice: 'required', @@ -93,13 +94,13 @@ async function main() { const runner = createAIRunner({ chatModel: new ChatModel({ - params: { model: 'gpt-4o', temperature: 0 } + params: { model: 'gpt-4o-mini', temperature: 0 } }), functions: createDexterFunctions( perigon.functions.pick('search_news_stories'), serper ), - systemMessage: `You are a helpful assistant. Be as concise as possible.` + systemMessage: 'You are a helpful assistant. Be as concise as possible.' }) const result = await runner( @@ -111,7 +112,7 @@ async function main() { Here we've exposed 2 functions to the LLM, `search_news_stories` (which comes from the `PerigonClient.searchStories` method) and `serper_google_search` (which implicitly comes from the `SerperClient.search` method). -All of the SDK adapters like `createDexterFunctions` accept very flexible in what they accept. `AIFunctionLike` objects include: +All of the SDK adapters like `createDexterFunctions` accept very flexible `AIFunctionLike` objects, which include: - `AIFunctionSet` - Sets of AI functions (like `perigon.functions.pick('search_news_stories')` or `perigon.functions` or `serper.functions`) - `AIFunctionsProvider` - Client classes which expose an `AIFunctionSet` via the `.functions` property (like `perigon` or `serper`) @@ -129,7 +130,7 @@ This package is [ESM only](https://gist.github.com/sindresorhus/a39789f98801d908 ### AI SDKs -Each AI SDK adapter is available from it's own package and needs to be installed in addition to the packages above. +Each AI SDK adapter has its own package which needs to be installed.
@@ -144,6 +145,8 @@ npm install @agentic/ai-sdk ai import { createAISDKTools } from '@agentic/ai-sdk' ``` +See [examples/ai-sdk](./examples/ai-sdk) for a full example. +
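As noted above, all of the SDK adapters accept flexible `AIFunctionLike` inputs, but the intro example only passes a single `WeatherClient` to `createAISDKTools`. Here is a sketch of mixing several `AIFunctionLike` values in one call. It assumes `PerigonClient` and `SerperClient` are exported from `@agentic/stdlib` alongside `WeatherClient`, that `createAISDKTools` is variadic like `createDexterFunctions` in the Dexter example above, and that the `openai` model provider comes from the `@ai-sdk/openai` package:

```ts
import { createAISDKTools } from '@agentic/ai-sdk'
import { PerigonClient, SerperClient, WeatherClient } from '@agentic/stdlib'
import { openai } from '@ai-sdk/openai'
import { generateText } from 'ai'

async function main() {
  // Each client reads its API key from the environment (e.g. `WEATHER_API_KEY`).
  const weather = new WeatherClient()
  const perigon = new PerigonClient()
  const serper = new SerperClient()

  const result = await generateText({
    model: openai('gpt-4o-mini'),
    // Any mix of AIFunctionLike values: a whole client (AIFunctionsProvider),
    // a picked subset of its functions (AIFunctionSet), or another client.
    tools: createAISDKTools(
      weather,
      perigon.functions.pick('search_news_stories'),
      serper
    ),
    toolChoice: 'required',
    temperature: 0,
    system: 'You are a helpful assistant. Be as concise as possible.',
    prompt: 'What is the weather in San Francisco?'
  })

  console.log(result.toolResults)
}

await main()
```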
@@ -159,6 +162,8 @@ npm install @agentic/langchain @langchain/core langchain import { createLangChainTools } from '@agentic/langchain' ``` +See [examples/langchain](./examples/langchain) for a full example. +
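For reference, here is a usage sketch based on the `examples/langchain/bin/weather.ts` file updated earlier in this patch. The `@langchain/openai` import, the remaining prompt messages, and the `AgentExecutor` wiring are assumptions, since they fall outside the hunks shown above:

```ts
import { createLangChainTools } from '@agentic/langchain'
import { WeatherClient } from '@agentic/stdlib'
import { ChatPromptTemplate } from '@langchain/core/prompts'
import { ChatOpenAI } from '@langchain/openai'
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'

async function main() {
  const weather = new WeatherClient()
  const tools = createLangChainTools(weather)

  const agent = createToolCallingAgent({
    llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
    tools,
    prompt: ChatPromptTemplate.fromMessages([
      ['system', 'You are a helpful assistant. Be as concise as possible.'],
      ['human', '{input}'],
      ['placeholder', '{agent_scratchpad}']
    ])
  })

  // Wrap the agent in an executor and run it (standard LangChain tool-calling pattern).
  const agentExecutor = new AgentExecutor({ agent, tools })
  const res = await agentExecutor.invoke({
    input: 'What is the weather in San Francisco?'
  })

  console.log(res.output)
}

await main()
```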
@@ -174,6 +179,8 @@ npm install @agentic/llamaindex llamaindex import { createLlamaIndexTools } from '@agentic/llamaindex' ``` +See [examples/llamaindex](./examples/llamaindex) for a full example. +
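Similarly, here is a sketch based on the `examples/llamaindex/bin/weather.ts` file touched by this patch. The `agent.chat(...)` call and the way its response is logged are assumptions, since the hunk above only shows how the agent is constructed:

```ts
import { createLlamaIndexTools } from '@agentic/llamaindex'
import { WeatherClient } from '@agentic/stdlib'
import { OpenAI, OpenAIAgent } from 'llamaindex'

async function main() {
  const weather = new WeatherClient()
  const tools = createLlamaIndexTools(weather)

  const agent = new OpenAIAgent({
    llm: new OpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
    systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
    tools
  })

  const response = await agent.chat({
    message: 'What is the weather in San Francisco?'
  })

  // The exact response shape depends on the llamaindex version.
  console.log(response)
}

await main()
```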
@@ -189,6 +196,8 @@ npm install @agentic/genkit @genkit-ai/ai @genkit-ai/core import { createGenkitTools } from '@agentic/genkit' ``` +See [examples/genkit](./examples/genkit) for a full example. +
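No Genkit example file is touched by this patch, so the following is only a rough sketch rather than a copy of the working example — see [examples/genkit](./examples/genkit) for the real thing. It assumes the pre-1.0 Genkit API (`generate` from `@genkit-ai/ai`, `configureGenkit` from `@genkit-ai/core`) and the community `genkitx-openai` plugin (not installed by the command above) for its `openAI()` plugin and `gpt4o` model reference:

```ts
import { createGenkitTools } from '@agentic/genkit'
import { WeatherClient } from '@agentic/stdlib'
import { generate } from '@genkit-ai/ai'
import { configureGenkit } from '@genkit-ai/core'
// Assumption: the community OpenAI plugin for pre-1.0 Genkit.
import { gpt4o, openAI } from 'genkitx-openai'

async function main() {
  const weather = new WeatherClient()

  // Register the OpenAI plugin globally (pre-1.0 Genkit configuration style).
  configureGenkit({ plugins: [openAI()] })

  const result = await generate({
    model: gpt4o,
    // Assumption: the Genkit adapter takes AIFunctionLike values directly,
    // like the other adapters in this readme.
    tools: createGenkitTools(weather),
    prompt: 'What is the weather in San Francisco?'
  })

  console.log(result)
}

await main()
```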
@@ -204,6 +213,8 @@ npm install @agentic/dexter @dexaai/dexter import { createDexterFunctions } from '@agentic/dexter' ``` +See [examples/dexter](./examples/dexter) for a full example. +
@@ -215,7 +226,7 @@ import { createDexterFunctions } from '@agentic/dexter' npm install openai ``` -There's no need for an adapter with the OpenAI SDK since all agentic tools are compatible with OpenAI by default. You can use `AIFunctionSet.specs` for function calling or `AIFunctionSet.toolSpecs` for parallel tool calling. For example: +There's no need for an adapter with the OpenAI SDK since all agentic tools are compatible with OpenAI by default. You can use `AIFunctionSet.specs` for function calling or `AIFunctionSet.toolSpecs` for parallel tool calling. ```ts import { WeatherClient } from '@agentic/stdlib' @@ -236,7 +247,7 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [ // First call to OpenAI to invoke the weather tool const res = await openai.chat.completions.create({ messages, - model: 'gpt-4o', + model: 'gpt-4o-mini', temperature: 0, tools: weather.functions.toolSpecs, tool_choice: 'required' @@ -263,7 +274,7 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [ // Second call to OpenAI to generate a text response const res = await openai.chat.completions.create({ messages, - model: 'gpt-4o', + model: 'gpt-4o-mini', temperature: 0, tools: weather.functions.toolSpecs }) @@ -272,6 +283,8 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [ } ``` +See [examples/openai](./examples/openai) for a full example. +
See the [examples](./examples) directory for working examples of how to use each of these adapters.