pull/657/head
Travis Fischer 2024-08-04 05:55:47 -05:00
parent c8cdc7c0c9
commit 28d8ca2430
11 changed files with 35 additions and 22 deletions

View file

@@ -12,7 +12,7 @@ async function main() {
console.log(browserTool.parameters)
const result = await generateText({
- model: openai('gpt-4o'),
+ model: openai('gpt-4o-mini'),
tools: { browserTool },
toolChoice: 'required',
temperature: 0,

View file

@@ -10,7 +10,7 @@ async function main() {
const weather = new WeatherClient()
const result = await generateText({
- model: openai('gpt-4o'),
+ model: openai('gpt-4o-mini'),
tools: createAISDKTools(weather),
toolChoice: 'required',
temperature: 0,
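
For reference, a minimal standalone version of this Vercel AI SDK example after the switch to `gpt-4o-mini` might look like the sketch below. It is assembled from the pieces shown in this diff and the README; the `ai` and `@ai-sdk/openai` imports follow standard Vercel AI SDK usage, and the prompt string and logging choice are only illustrative:

```ts
import { createAISDKTools } from '@agentic/ai-sdk'
import { WeatherClient } from '@agentic/stdlib'
import { openai } from '@ai-sdk/openai'
import { generateText } from 'ai'

async function main() {
  // Requires `process.env.WEATHER_API_KEY` (from weatherapi.com)
  const weather = new WeatherClient()

  const result = await generateText({
    model: openai('gpt-4o-mini'),
    // The `@agentic/ai-sdk` adapter exposes every weather function as a tool
    tools: createAISDKTools(weather),
    toolChoice: 'required',
    temperature: 0,
    prompt: 'What is the weather in San Francisco?' // illustrative prompt
  })

  console.log(result.toolResults[0])
}

await main()
```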

View file

@@ -13,7 +13,7 @@ async function main() {
const runner = createAIRunner({
chatModel: new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 }
+ params: { model: 'gpt-4o-mini', temperature: 0 }
// debug: true
}),
functions: createDexterFunctions(searchAndCrawl),

View file

@@ -8,7 +8,7 @@ import { ChatModel, createAIRunner } from '@dexaai/dexter'
async function main() {
const runner = createAIRunner({
chatModel: new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 },
+ params: { model: 'gpt-4o-mini', temperature: 0 },
debug: true
}),
functions: createDexterFunctions(e2b)

View file

@@ -11,7 +11,7 @@ async function main() {
const serper = new SerperClient()
const chatModel = new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 },
+ params: { model: 'gpt-4o-mini', temperature: 0 },
debug: true
})

View file

@@ -12,7 +12,7 @@ async function main() {
const runner = createAIRunner({
chatModel: new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 }
+ params: { model: 'gpt-4o-mini', temperature: 0 }
// debug: true
}),
functions: createDexterFunctions(

View file

@@ -10,7 +10,7 @@ async function main() {
const runner = createAIRunner({
chatModel: new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 }
+ params: { model: 'gpt-4o-mini', temperature: 0 }
// debug: true
}),
functions: createDexterFunctions(weather),
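
For context, a runnable version of this Dexter weather example with the smaller model could look roughly like the sketch below. The runner wiring and `systemMessage` mirror what the README diff shows; the question string and the `await main()` entry point are illustrative:

```ts
import { createDexterFunctions } from '@agentic/dexter'
import { WeatherClient } from '@agentic/stdlib'
import { ChatModel, createAIRunner } from '@dexaai/dexter'

async function main() {
  // Requires `process.env.WEATHER_API_KEY` (from weatherapi.com)
  const weather = new WeatherClient()

  const runner = createAIRunner({
    chatModel: new ChatModel({
      params: { model: 'gpt-4o-mini', temperature: 0 }
      // debug: true
    }),
    // Exposes the weather client's functions to the Dexter runner
    functions: createDexterFunctions(weather),
    systemMessage: 'You are a helpful assistant. Be as concise as possible.'
  })

  const result = await runner('What is the weather in San Francisco?')
  console.log(result)
}

await main()
```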

View file

@@ -12,7 +12,7 @@ async function main() {
const tools = createLangChainTools(weather)
const agent = createToolCallingAgent({
- llm: new ChatOpenAI({ model: 'gpt-4o', temperature: 0 }),
+ llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
tools,
prompt: ChatPromptTemplate.fromMessages([
['system', 'You are a helpful assistant. Be as concise as possible.'],
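
A fuller sketch of this LangChain example after the model switch is shown below. Only the lines in the hunk come from the diff; the remaining prompt messages, the `@langchain/openai` and `langchain/agents` imports, and the `AgentExecutor` wiring are assumptions based on standard LangChain.js usage and should be checked against the repo's example:

```ts
import { createLangChainTools } from '@agentic/langchain'
import { WeatherClient } from '@agentic/stdlib'
import { ChatPromptTemplate } from '@langchain/core/prompts'
import { ChatOpenAI } from '@langchain/openai'
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'

async function main() {
  // Requires `process.env.WEATHER_API_KEY` (from weatherapi.com)
  const weather = new WeatherClient()

  const tools = createLangChainTools(weather)
  const agent = createToolCallingAgent({
    llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
    tools,
    prompt: ChatPromptTemplate.fromMessages([
      ['system', 'You are a helpful assistant. Be as concise as possible.'],
      // The placeholders below are standard for tool-calling agents (assumed)
      ['human', '{input}'],
      ['placeholder', '{agent_scratchpad}']
    ])
  })

  const agentExecutor = new AgentExecutor({ agent, tools })
  const res = await agentExecutor.invoke({
    input: 'What is the weather in San Francisco?' // illustrative question
  })

  console.log(res.output)
}

await main()
```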

View file

@@ -10,7 +10,7 @@ async function main() {
const tools = createLlamaIndexTools(weather)
const agent = new OpenAIAgent({
- llm: new OpenAI({ model: 'gpt-4o', temperature: 0 }),
+ llm: new OpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
tools
})
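
For reference, a minimal self-contained version of this LlamaIndex example might look like the sketch below. The agent construction matches the hunk; the `llamaindex` imports, the `agent.chat({ message })` call, and the response shape are assumptions that may vary across `llamaindex` versions:

```ts
import { createLlamaIndexTools } from '@agentic/llamaindex'
import { WeatherClient } from '@agentic/stdlib'
import { OpenAI, OpenAIAgent } from 'llamaindex'

async function main() {
  // Requires `process.env.WEATHER_API_KEY` (from weatherapi.com)
  const weather = new WeatherClient()

  const tools = createLlamaIndexTools(weather)
  const agent = new OpenAIAgent({
    llm: new OpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
    systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
    tools
  })

  const response = await agent.chat({
    message: 'What is the weather in San Francisco?' // illustrative question
  })

  // Response shape may differ between llamaindex versions
  console.log(response)
}

await main()
```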

View file

@@ -21,7 +21,7 @@ async function main() {
// First call to OpenAI to invoke the weather tool
const res = await openai.chat.completions.create({
messages,
- model: 'gpt-4o',
+ model: 'gpt-4o-mini',
temperature: 0,
tools: weather.functions.toolSpecs,
tool_choice: 'required'
@@ -48,7 +48,7 @@ async function main() {
// Second call to OpenAI to generate a text response
const res = await openai.chat.completions.create({
messages,
- model: 'gpt-4o',
+ model: 'gpt-4o-mini',
temperature: 0,
tools: weather.functions.toolSpecs
})
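
Between those two calls, the tool call chosen by the model has to be executed and its result appended to `messages`. The sketch below is not the elided code from this file, just an illustration of that middle step; it assumes `weather.functions.get(name)` returns a callable `AIFunction` that accepts the JSON-encoded arguments, which should be verified against the agentic docs:

```ts
const message = res.choices[0]!.message
const toolCall = message.tool_calls![0]!

// Look up the matching agentic function by its tool name
// (assumes `AIFunctionSet.get(name)` returns a callable AIFunction)
const fn = weather.functions.get(toolCall.function.name)!

// Invoke it with the JSON-encoded arguments produced by the model
const toolResult = await fn(toolCall.function.arguments)

// Feed the assistant message and the tool result back into the conversation
messages.push(message)
messages.push({
  role: 'tool',
  tool_call_id: toolCall.id,
  content: JSON.stringify(toolResult)
})
```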

View file

@@ -33,14 +33,15 @@
## Intro
- The goal of this project is to create a **set of standard AI functions / tools** which are **optimized for both normal TS-usage as well as LLM-based apps** and that work with all of the major AI SDKs (LangChain, LlamaIndex, Vercel AI SDK, OpenAI SDK, etc).
+ The goal of this project is to create a **set of standard AI functions / tools** which are **optimized for both normal TS-usage as well as LLM-based apps** and **work with all of the major TS AI SDKs** (LangChain, LlamaIndex, Vercel AI SDK, OpenAI SDK, etc).
- For example, stdlib clients like `WeatherClient` can be used as normal TS classes:
+ Agentic clients like `WeatherClient` can be used as normal TS classes:
```ts
import { WeatherClient } from '@agentic/stdlib'
- const weather = new WeatherClient() // (requires `WEATHER_API_KEY` env var)
+ // Requires `process.env.WEATHER_API_KEY` (from weatherapi.com)
+ const weather = new WeatherClient()
const result = await weather.getCurrentWeather({
q: 'San Francisco'
@@ -64,7 +65,7 @@ import { WeatherClient } from '@agentic/stdlib'
const weather = new WeatherClient()
const result = await generateText({
- model: openai('gpt-4o'),
+ model: openai('gpt-4o-mini'),
// this is the key line which uses the `@agentic/ai-sdk` adapter
tools: createAISDKTools(weather),
toolChoice: 'required',
@@ -93,13 +94,13 @@ async function main() {
const runner = createAIRunner({
chatModel: new ChatModel({
- params: { model: 'gpt-4o', temperature: 0 }
+ params: { model: 'gpt-4o-mini', temperature: 0 }
}),
functions: createDexterFunctions(
perigon.functions.pick('search_news_stories'),
serper
),
- systemMessage: `You are a helpful assistant. Be as concise as possible.`
+ systemMessage: 'You are a helpful assistant. Be as concise as possible.'
})
const result = await runner(
@@ -111,7 +112,7 @@ async function main() {
Here we've exposed 2 functions to the LLM, `search_news_stories` (which comes from the `PerigonClient.searchStories` method) and `serper_google_search` (which implicitly comes from the `SerperClient.search` method).
- All of the SDK adapters like `createDexterFunctions` accept very flexible in what they accept. `AIFunctionLike` objects include:
+ All of the SDK adapters like `createDexterFunctions` accept very flexible `AIFunctionLike` objects, which include:
- `AIFunctionSet` - Sets of AI functions (like `perigon.functions.pick('search_news_stories')` or `perigon.functions` or `serper.functions`)
- `AIFunctionsProvider` - Client classes which expose an `AIFunctionSet` via the `.functions` property (like `perigon` or `serper`)
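
In practice that means the different shapes can be mixed freely in a single adapter call. A small sketch, assuming `PerigonClient` and `SerperClient` are exported from `@agentic/stdlib` like the other clients and that their API keys are set via env vars:

```ts
import { createDexterFunctions } from '@agentic/dexter'
import { PerigonClient, SerperClient } from '@agentic/stdlib'

// Requires `process.env.PERIGON_API_KEY` and `process.env.SERPER_API_KEY`
const perigon = new PerigonClient()
const serper = new SerperClient()

// Both AIFunctionLike shapes can be passed to the same adapter:
const functions = createDexterFunctions(
  perigon.functions.pick('search_news_stories'), // a filtered AIFunctionSet
  serper // an AIFunctionsProvider (client exposing `.functions`)
)
```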
@@ -129,7 +130,7 @@ This package is [ESM only](https://gist.github.com/sindresorhus/a39789f98801d908
### AI SDKs
- Each AI SDK adapter is available from it's own package and needs to be installed in addition to the packages above.
+ Each AI SDK adapter has its own package which needs to be installed.
<details>
<summary>
@@ -144,6 +145,8 @@ npm install @agentic/ai-sdk ai
import { createAISDKTools } from '@agentic/ai-sdk'
```
+ See [examples/ai-sdk](./examples/ai-sdk) for a full example.
</details>
<details>
@@ -159,6 +162,8 @@ npm install @agentic/langchain @langchain/core langchain
import { createLangChainTools } from '@agentic/langchain'
```
+ See [examples/langchain](./examples/langchain) for a full example.
</details>
<details>
@@ -174,6 +179,8 @@ npm install @agentic/llamaindex llamaindex
import { createLlamaIndexTools } from '@agentic/llamaindex'
```
+ See [examples/llamaindex](./examples/llamaindex) for a full example.
</details>
<details>
@@ -189,6 +196,8 @@ npm install @agentic/genkit @genkit-ai/ai @genkit-ai/core
import { createGenkitTools } from '@agentic/genkit'
```
+ See [examples/genkit](./examples/genkit) for a full example.
</details>
<details>
@@ -204,6 +213,8 @@ npm install @agentic/dexter @dexaai/dexter
import { createDexterFunctions } from '@agentic/dexter'
```
+ See [examples/dexter](./examples/dexter) for a full example.
</details>
<details>
@@ -215,7 +226,7 @@ import { createDexterFunctions } from '@agentic/dexter'
npm install openai
```
- There's no need for an adapter with the OpenAI SDK since all agentic tools are compatible with OpenAI by default. You can use `AIFunctionSet.specs` for function calling or `AIFunctionSet.toolSpecs` for parallel tool calling. For example:
+ There's no need for an adapter with the OpenAI SDK since all agentic tools are compatible with OpenAI by default. You can use `AIFunctionSet.specs` for function calling or `AIFunctionSet.toolSpecs` for parallel tool calling.
```ts
import { WeatherClient } from '@agentic/stdlib'
@@ -236,7 +247,7 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [
// First call to OpenAI to invoke the weather tool
const res = await openai.chat.completions.create({
messages,
- model: 'gpt-4o',
+ model: 'gpt-4o-mini',
temperature: 0,
tools: weather.functions.toolSpecs,
tool_choice: 'required'
@@ -263,7 +274,7 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [
// Second call to OpenAI to generate a text response
const res = await openai.chat.completions.create({
messages,
- model: 'gpt-4o',
+ model: 'gpt-4o-mini',
temperature: 0,
tools: weather.functions.toolSpecs
})
@@ -272,6 +283,8 @@ const messages: OpenAI.ChatCompletionMessageParam[] = [
}
```
+ See [examples/openai](./examples/openai) for a full example.
</details>
See the [examples](./examples) directory for working examples of how to use each of these adapters.