From c044e9e4b089828d52859f4e759d2141005c2270 Mon Sep 17 00:00:00 2001
From: Travis Fischer
Date: Mon, 26 May 2025 22:04:32 +0700
Subject: [PATCH] =?UTF-8?q?=F0=9F=8D=B0?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/sdks/llamaindex.mdx | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/docs/sdks/llamaindex.mdx b/docs/sdks/llamaindex.mdx
index 86d53d02..f0042afb 100644
--- a/docs/sdks/llamaindex.mdx
+++ b/docs/sdks/llamaindex.mdx
@@ -12,15 +12,15 @@ description: Agentic adapter for the LlamaIndex TS SDK.
 
 ```bash npm
-npm install @agentic/llamaindex llamaindex
+npm install @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
 ```
 
 ```bash yarn
-yarn add @agentic/llamaindex llamaindex
+yarn add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
 ```
 
 ```bash pnpm
-pnpm add @agentic/llamaindex llamaindex
+pnpm add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
 ```
 
@@ -32,23 +32,25 @@
 import 'dotenv/config'
 
 import { createLlamaIndexTools } from '@agentic/llamaindex'
 import { WeatherClient } from '@agentic/stdlib'
-import { OpenAI, OpenAIAgent } from 'llamaindex'
+import { openai } from '@llamaindex/openai'
+import { agent } from '@llamaindex/workflow'
 
 async function main() {
   const weather = new WeatherClient()
   const tools = createLlamaIndexTools(weather)
 
-  const agent = new OpenAIAgent({
-    llm: new OpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
+  const weatherAgent = agent({
+    name: 'Weather Agent',
+    llm: openai({ model: 'gpt-4o-mini', temperature: 0 }),
     systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
     tools
   })
 
-  const response = await agent.chat({
-    message: 'What is the weather in San Francisco?'
-  })
+  const response = await weatherAgent.run(
+    'What is the weather in San Francisco?'
+  )
 
-  console.log(response.message.content)
+  console.log(response.data.result)
 }
 
 await main()