kopia lustrzana https://github.com/transitive-bullshit/chatgpt-api
Merge pull request #715 from transitive-bullshit/feature/docs-1
commit
c1562d8546
|
@ -1,6 +1,7 @@
|
|||
import type { RawConsumer } from '@/db'
|
||||
import type { AuthenticatedHonoContext } from '@/lib/types'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { env } from '@/lib/env'
|
||||
|
||||
export function setAdminCacheControlForConsumer(
|
||||
c: AuthenticatedHonoContext,
|
||||
|
@ -11,8 +12,15 @@ export function setAdminCacheControlForConsumer(
|
|||
!consumer.activated ||
|
||||
!consumer.isStripeSubscriptionActive
|
||||
) {
|
||||
setPublicCacheControl(c.res, '10s')
|
||||
// TODO: should we cache free-tier consumers for longer on prod?
|
||||
// We really don't want free tier customers to cause our backend API so
|
||||
// much traffic, but we'd also like for customers upgrading to a paid tier
|
||||
// to have a snappy, smooth experience – without having to wait for their
|
||||
// free tier subscription to expire from the cache.
|
||||
setPublicCacheControl(c.res, env.isProd ? '30s' : '10s')
|
||||
} else {
|
||||
setPublicCacheControl(c.res, '1m')
|
||||
// We don't want the gateway hitting our API too often, so cache active
|
||||
// customer subscriptions for longer in production
|
||||
setPublicCacheControl(c.res, env.isProd ? '30m' : '1m')
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,7 +5,9 @@ import type { AuthenticatedHonoEnv } from '@/lib/types'
|
|||
import { schema } from '@/db'
|
||||
import { acl } from '@/lib/acl'
|
||||
import { aclAdmin } from '@/lib/acl-admin'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { tryGetDeploymentByIdentifier } from '@/lib/deployments/try-get-deployment-by-identifier'
|
||||
import { env } from '@/lib/env'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
openapiErrorResponse404,
|
||||
|
@ -66,6 +68,13 @@ export function registerV1AdminGetDeploymentByIdentifier(
|
|||
|
||||
const hasPopulateProject = populate.includes('project')
|
||||
|
||||
if (env.isProd) {
|
||||
// Published deployments are immutable, so cache them for longer in production
|
||||
setPublicCacheControl(c.res, deployment.published ? '1h' : '5m')
|
||||
} else {
|
||||
setPublicCacheControl(c.res, '10s')
|
||||
}
|
||||
|
||||
return c.json(
|
||||
parseZodSchema(schema.deploymentAdminSelectSchema, {
|
||||
...deployment,
|
||||
|
|
|
@ -4,6 +4,7 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi'
|
|||
|
||||
import { schema } from '@/db'
|
||||
import { aclPublicProject } from '@/lib/acl-public-project'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { tryGetDeploymentByIdentifier } from '@/lib/deployments/try-get-deployment-by-identifier'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
|
@ -59,6 +60,13 @@ export function registerV1GetPublicDeploymentByIdentifier(
|
|||
)
|
||||
aclPublicProject(deployment.project!)
|
||||
|
||||
if (deployment.published) {
|
||||
// Note that published deployments should be immutable
|
||||
setPublicCacheControl(c.res, '1m')
|
||||
} else {
|
||||
setPublicCacheControl(c.res, '10s')
|
||||
}
|
||||
|
||||
return c.json(parseZodSchema(schema.deploymentSelectSchema, deployment))
|
||||
})
|
||||
}
|
||||
|
|
|
@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi'
|
|||
|
||||
import { db, eq, schema } from '@/db'
|
||||
import { aclPublicProject } from '@/lib/acl-public-project'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { env } from '@/lib/env'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
openapiErrorResponse404,
|
||||
|
@ -50,7 +52,8 @@ export function registerV1GetPublicProjectByIdentifier(
|
|||
...Object.fromEntries(populate.map((field) => [field, true]))
|
||||
}
|
||||
})
|
||||
await aclPublicProject(project, projectIdentifier)
|
||||
aclPublicProject(project, projectIdentifier)
|
||||
setPublicCacheControl(c.res, env.isProd ? '1m' : '10s')
|
||||
|
||||
return c.json(parseZodSchema(schema.projectSelectSchema, project))
|
||||
})
|
||||
|
|
|
@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi'
|
|||
|
||||
import { db, eq, schema } from '@/db'
|
||||
import { aclPublicProject } from '@/lib/acl-public-project'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { env } from '@/lib/env'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
openapiErrorResponse404,
|
||||
|
@ -50,6 +52,7 @@ export function registerV1GetPublicProject(app: OpenAPIHono<DefaultHonoEnv>) {
|
|||
}
|
||||
})
|
||||
aclPublicProject(project, projectId)
|
||||
setPublicCacheControl(c.res, env.isProd ? '1m' : '10s')
|
||||
|
||||
return c.json(parseZodSchema(schema.projectSelectSchema, project))
|
||||
})
|
||||
|
|
|
@ -1,8 +1,11 @@
|
|||
import { env } from 'node:process'
|
||||
|
||||
import type { DefaultHonoEnv } from '@agentic/platform-hono'
|
||||
import { parseZodSchema } from '@agentic/platform-core'
|
||||
import { createRoute, type OpenAPIHono, z } from '@hono/zod-openapi'
|
||||
|
||||
import { and, db, eq, isNotNull, schema } from '@/db'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
openapiErrorResponses
|
||||
|
@ -60,6 +63,7 @@ export function registerV1ListPublicProjects(app: OpenAPIHono<DefaultHonoEnv>) {
|
|||
offset,
|
||||
limit
|
||||
})
|
||||
setPublicCacheControl(c.res, env.isProd ? '1m' : '10s')
|
||||
|
||||
return c.json(parseZodSchema(z.array(schema.projectSelectSchema), projects))
|
||||
})
|
||||
|
|
|
@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi'
|
|||
import type { AuthenticatedHonoEnv } from '@/lib/types'
|
||||
import { db, eq, schema } from '@/db'
|
||||
import { acl } from '@/lib/acl'
|
||||
import { setPublicCacheControl } from '@/lib/cache-control'
|
||||
import { env } from '@/lib/env'
|
||||
import {
|
||||
openapiAuthenticatedSecuritySchemas,
|
||||
openapiErrorResponse404,
|
||||
|
@ -45,6 +47,7 @@ export function registerV1GetUser(app: OpenAPIHono<AuthenticatedHonoEnv>) {
|
|||
where: eq(schema.users.id, userId)
|
||||
})
|
||||
assert(user, 404, `User not found "${userId}"`)
|
||||
setPublicCacheControl(c.res, env.isProd ? '30s' : '10s')
|
||||
|
||||
return c.json(parseZodSchema(schema.userSelectSchema, user))
|
||||
})
|
||||
|
|
|
@ -1,12 +1,25 @@
|
|||
import { assert } from '@agentic/platform-core'
|
||||
|
||||
export type PublicCacheControlLevels = '1s' | '10s' | '1m' | '1h' | '1d'
|
||||
export type PublicCacheControlLevels =
|
||||
| '1s'
|
||||
| '10s'
|
||||
| '30s'
|
||||
| '1m'
|
||||
| '5m'
|
||||
| '10m'
|
||||
| '30m'
|
||||
| '1h'
|
||||
| '1d'
|
||||
|
||||
const publicCacheControlLevelsMap: Record<PublicCacheControlLevels, string> = {
|
||||
'1s': 'public, max-age=1, s-maxage=1 stale-while-revalidate=0',
|
||||
'10s': 'public, max-age=10, s-maxage=10 stale-while-revalidate=1',
|
||||
'30s': 'public, max-age=30, s-maxage=30 stale-while-revalidate=5',
|
||||
'1m': 'public, max-age=60, s-maxage=60 stale-while-revalidate=10',
|
||||
'1h': 'public, max-age=3600, s-maxage=3600, stale-while-revalidate=300',
|
||||
'5m': 'public, max-age=300, s-maxage=300 stale-while-revalidate=60',
|
||||
'10m': 'public, max-age=600, s-maxage=600 stale-while-revalidate=120',
|
||||
'30m': 'public, max-age=1800, s-maxage=1800 stale-while-revalidate=300',
|
||||
'1h': 'public, max-age=3600, s-maxage=3600, stale-while-revalidate=500',
|
||||
'1d': 'public, max-age=86400, s-maxage=86400, stale-while-revalidate=3600'
|
||||
}
|
||||
|
||||
|
|
|
@ -180,7 +180,7 @@ export function recordToolCallUsage({
|
|||
// If there's a consumer and it hasn't been activated yet, make sure it's
|
||||
// activated. This may be called multiple times if the consumer is cached,
|
||||
// but this method is intentionally idempotent, and we don't cache non-
|
||||
// activated consumers for long, so shouldn't be a problem.
|
||||
// activated consumers for long, so it shouldn't be a problem.
|
||||
waitUntil(client.adminActivateConsumer({ consumerId: consumer.id }))
|
||||
}
|
||||
|
||||
|
|
|
@ -45,6 +45,18 @@
|
|||
{
|
||||
"group": "Getting Started",
|
||||
"pages": ["index", "marketplace/index"]
|
||||
},
|
||||
{
|
||||
"group": "TypeScript AI SDKs",
|
||||
"pages": [
|
||||
"marketplace/ts-sdks/ai-sdk",
|
||||
"marketplace/ts-sdks/openai-chat",
|
||||
"marketplace/ts-sdks/openai-responses",
|
||||
"marketplace/ts-sdks/langchain",
|
||||
"marketplace/ts-sdks/llamaindex",
|
||||
"marketplace/ts-sdks/genkit",
|
||||
"marketplace/ts-sdks/mastra"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
@ -7,7 +7,7 @@ description: Agentic is the App Store for LLM Tools.
|
|||
|
||||
<Columns cols={2}>
|
||||
<Card title='MCP Marketplace Docs' href='/marketplace' icon='store'>
|
||||
For developers interested in consuming Agentic tools.
|
||||
For developers interested in using Agentic tools in their apps.
|
||||
</Card>
|
||||
<Card title='Publishing MCP Docs' href='/publishing' icon='upload'>
|
||||
For developers interested in publishing their own MCP server or OpenAPI
|
||||
|
|
|
@ -1,8 +1,78 @@
|
|||
---
|
||||
title: Quick Start
|
||||
description: A quick start on using tools from Agentic's MCP marketplace.
|
||||
description: A quick start on how to use tools from Agentic's marketplace.
|
||||
---
|
||||
|
||||
Docs for Agentic's MCP marketplace are a WIP.
|
||||
## TypeScript AI SDKs
|
||||
|
||||
Visit the [publishing quick start](/publishing/quickstart) to get started in the meantime.
|
||||
<Columns cols={2}>
|
||||
<Card
|
||||
title='Vercel AI SDK'
|
||||
href='/marketplace/ts-sdks/ai-sdk'
|
||||
icon='/media/sdks/vercel.svg'
|
||||
>
|
||||
How to use Agentic tools with the Vercel AI SDK.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='LangChain'
|
||||
href='/marketplace/ts-sdks/langchain'
|
||||
icon='/media/sdks/langchain.svg'
|
||||
>
|
||||
How to use Agentic tools with LangChain's TS SDK.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='OpenAI Chat Completions'
|
||||
href='/marketplace/ts-sdks/openai-chat'
|
||||
icon='/media/sdks/openai.svg'
|
||||
>
|
||||
How to use Agentic tools with the OpenAI Chat Completions API.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='OpenAI Responses'
|
||||
href='/marketplace/ts-sdks/openai-responses'
|
||||
icon='/media/sdks/openai.svg'
|
||||
>
|
||||
How to use Agentic tools with the OpenAI Responses API.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='Llamaindex'
|
||||
href='/marketplace/ts-sdks/llamaindex'
|
||||
icon='/media/sdks/llamaindex.svg'
|
||||
>
|
||||
How to use Agentic tools with Llamaindex's TS SDK.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='Firebase Genkit'
|
||||
href='/marketplace/ts-sdks/genkit'
|
||||
icon='/media/sdks/firebase.svg'
|
||||
>
|
||||
How to use Agentic tools with the Firebase Genkit SDK.
|
||||
</Card>
|
||||
|
||||
<Card
|
||||
title='Mastra'
|
||||
href='/marketplace/ts-sdks/mastra'
|
||||
icon='/media/sdks/mastra.svg'
|
||||
>
|
||||
How to use Agentic tools with the Mastra SDK.
|
||||
</Card>
|
||||
</Columns>
|
||||
|
||||
## MCP Clients
|
||||
|
||||
_MCP client docs are coming soon..._
|
||||
|
||||
## Python AI SDKs
|
||||
|
||||
_Python docs are coming soon..._
|
||||
|
||||
## Want to publish your own MCP tools?
|
||||
|
||||
Agentic makes it extremely easy to publish and monetize your own MCP tools. Regardless of whether you're starting from scratch or already have an existing API, you'll be up and running with a production-ready MCP product in minutes.
|
||||
|
||||
Visit the [publishing docs](/publishing) to get started.
|
||||
|
|
|
@ -0,0 +1,111 @@
|
|||
---
|
||||
title: Vercel AI SDK
|
||||
description: How to use Agentic tools with the Vercel AI SDK.
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install ai @agentic/ai-sdk @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add ai @agentic/ai-sdk @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add ai @agentic/ai-sdk @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add ai @agentic/ai-sdk @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createAISDKTools } from '@agentic/ai-sdk'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import { createOpenAI } from '@ai-sdk/openai'
|
||||
import { generateText } from 'ai'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
const openai = createOpenAI({ compatibility: 'strict' })
|
||||
|
||||
const result = await generateText({
|
||||
model: openai('gpt-4o-mini'),
|
||||
tools: createAISDKTools(searchTool),
|
||||
toolChoice: 'required',
|
||||
temperature: 0,
|
||||
system: 'You are a helpful assistant. Be as concise as possible.',
|
||||
prompt: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(JSON.stringify(result.toolResults[0], null, 2))
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
<Expandable title="Additional dependencies">
|
||||
This example also uses the [@ai-sdk/openai](https://ai-sdk.dev/providers/ai-sdk-providers/openai) provider, which adds OpenAI support to the Vercel AI SDK.
|
||||
|
||||
_Note that OpenAI is not necessary to use Agentic; this is just an example._
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
</Expandable>
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/ai-sdk
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/ai-sdk/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [`@agentic/ai-sdk` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/ai-sdk/src/ai-sdk.ts)
|
||||
- [Vercel AI SDK docs](https://ai-sdk.dev)
|
|
@ -0,0 +1,112 @@
|
|||
---
|
||||
title: Firebase Genkit
|
||||
description: How to use Agentic tools with the Firebase Genkit SDK.
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install genkit @agentic/genkit @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add genkit @agentic/genkit @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add genkit @agentic/genkit @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add genkit @agentic/genkit @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createGenkitTools } from '@agentic/genkit'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import { genkit } from 'genkit'
|
||||
import { gpt4oMini, openAI } from 'genkitx-openai'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [openAI()]
|
||||
})
|
||||
|
||||
const result = await ai.generate({
|
||||
model: gpt4oMini,
|
||||
tools: createGenkitTools(ai, searchTool),
|
||||
system: 'You are a helpful assistant. Be as concise as possible.',
|
||||
prompt: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
<Expandable title="Additional dependencies">
|
||||
This example also uses the [genkitx-openai](https://github.com/TheFireCo/genkit-plugins/tree/main/plugins/openai) package, which adds OpenAI support to Genkit.
|
||||
|
||||
_Note that OpenAI is not necessary to use Agentic; this is just an example._
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install genkitx-openai dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add genkitx-openai dotenv
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add genkitx-openai dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add genkitx-openai dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
</Expandable>
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/genkit
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/genkit/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [`@agentic/genkit` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/genkit/src/genkit.ts)
|
||||
- [Firebase Genkit docs](https://firebase.google.com/docs/genkit)
|
|
@ -0,0 +1,124 @@
|
|||
---
|
||||
title: LangChain
|
||||
description: How to use Agentic tools with the LangChain TS SDK.
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createLangChainTools } from '@agentic/langchain'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import { ChatPromptTemplate } from '@langchain/core/prompts'
|
||||
import { ChatOpenAI } from '@langchain/openai'
|
||||
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
|
||||
const tools = createLangChainTools(searchTool)
|
||||
const agent = createToolCallingAgent({
|
||||
llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
|
||||
tools,
|
||||
prompt: ChatPromptTemplate.fromMessages([
|
||||
['system', 'You are a helpful assistant. Be as concise as possible.'],
|
||||
['placeholder', '{chat_history}'],
|
||||
['human', '{input}'],
|
||||
['placeholder', '{agent_scratchpad}']
|
||||
])
|
||||
})
|
||||
|
||||
const agentExecutor = new AgentExecutor({
|
||||
agent,
|
||||
tools
|
||||
// verbose: true
|
||||
})
|
||||
|
||||
const result = await agentExecutor.invoke({
|
||||
input: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(result.output)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
<Expandable title="Additional dependencies">
|
||||
This example also uses the [@langchain/openai](https://js.langchain.com/docs/integrations/platforms/openai) package, which adds OpenAI support to LangChain.
|
||||
|
||||
_Note that OpenAI is not necessary to use Agentic; this is just an example._
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @langchain/openai dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @langchain/openai dotenv
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add @langchain/openai dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @langchain/openai dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
</Expandable>
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/langchain
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/langchain/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [`@agentic/langchain` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/langchain/src/langchain.ts)
|
||||
- [LangChain TS docs](https://js.langchain.com)
|
|
@ -0,0 +1,112 @@
|
|||
---
|
||||
title: LlamaIndex
|
||||
description: How to use Agentic tools with the LlamaIndex TS SDK.
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createLlamaIndexTools } from '@agentic/llamaindex'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import { openai } from '@llamaindex/openai'
|
||||
import { agent } from '@llamaindex/workflow'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
|
||||
const tools = createLlamaIndexTools(searchTool)
|
||||
const weatherAgent = agent({
|
||||
llm: openai({ model: 'gpt-4o-mini', temperature: 0 }),
|
||||
systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
|
||||
tools
|
||||
})
|
||||
|
||||
const response = await weatherAgent.run(
|
||||
'What is the weather in San Francisco?'
|
||||
)
|
||||
|
||||
console.log(response.data.result)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
<Expandable title="Additional dependencies">
|
||||
This example also uses the [@llamaindex/openai](https://ts.llamaindex.ai/docs/llamaindex/modules/models/llms/openai) provider, which adds OpenAI support to LlamaIndex.
|
||||
|
||||
_Note that OpenAI is not necessary to use Agentic; this is just an example._
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @llamaindex/openai dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @llamaindex/openai dotenv
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add @llamaindex/openai dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @llamaindex/openai dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
</Expandable>
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/llamaindex
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/llamaindex/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [`@agentic/llamaindex` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/llamaindex/src/llamaindex.ts)
|
||||
- [LlamaIndex TS docs](https://ts.llamaindex.ai)
|
|
@ -0,0 +1,112 @@
|
|||
---
|
||||
title: Mastra
|
||||
description: How to use Agentic tools with the Mastra AI Agent framework.
|
||||
---
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @mastra/core @agentic/mastra @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @mastra/core @agentic/mastra @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add @mastra/core @agentic/mastra @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @mastra/core @agentic/mastra @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createMastraTools } from '@agentic/mastra'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import { openai } from '@ai-sdk/openai'
|
||||
import { Agent } from '@mastra/core/agent'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
|
||||
const weatherAgent = new Agent({
|
||||
name: 'Weather Agent',
|
||||
instructions: 'You are a helpful assistant. Be as concise as possible.',
|
||||
model: openai('gpt-4o-mini'),
|
||||
tools: createMastraTools(searchTool)
|
||||
})
|
||||
|
||||
const res = await weatherAgent.generate(
|
||||
'What is the weather in San Francisco?'
|
||||
)
|
||||
console.log(res.text)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
<Expandable title="Additional dependencies">
|
||||
This example also uses the [@ai-sdk/openai](https://ai-sdk.dev/providers/ai-sdk-providers/openai) provider, which adds OpenAI support to Mastra.
|
||||
|
||||
_Note that OpenAI is not necessary to use Agentic; this is just an example._
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @ai-sdk/openai dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
</Expandable>
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here:
|
||||
https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/mastra
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/mastra/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [`@agentic/mastra` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/mastra/src/mastra.ts)
|
||||
- [Mastra docs](https://mastra.ai)
|
|
@ -0,0 +1,124 @@
|
|||
---
|
||||
title: OpenAI Chat Completions
|
||||
description: How to use Agentic tools with the OpenAI Chat Completions API.
|
||||
---
|
||||
|
||||
<Tip>
|
||||
There's no need for an adapter with the OpenAI SDK since all agentic tools are
|
||||
compatible with OpenAI by default.
|
||||
</Tip>
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
const openai = new OpenAI()
|
||||
|
||||
const messages: OpenAI.ChatCompletionMessageParam[] = [
|
||||
{
|
||||
role: 'system',
|
||||
content: 'You are a helpful assistant. Be as concise as possible.'
|
||||
},
|
||||
{ role: 'user', content: 'What is the weather in San Francisco?' }
|
||||
]
|
||||
|
||||
{
|
||||
// First call to OpenAI to invoke the tool
|
||||
const res = await openai.chat.completions.create({
|
||||
messages,
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: searchTool.functions.toolSpecs,
|
||||
tool_choice: 'required'
|
||||
})
|
||||
|
||||
const message = res.choices[0]!.message!
|
||||
const toolCall = message.tool_calls![0]!.function!
|
||||
const toolResult = await searchTool.callTool(
|
||||
toolCall.name,
|
||||
toolCall.arguments
|
||||
)
|
||||
|
||||
messages.push(message)
|
||||
messages.push({
|
||||
role: 'tool',
|
||||
tool_call_id: message.tool_calls![0]!.id,
|
||||
content: JSON.stringify(toolResult)
|
||||
})
|
||||
}
|
||||
|
||||
{
|
||||
// Second call to OpenAI to generate a text response
|
||||
const res = await openai.chat.completions.create({
|
||||
messages,
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: searchTool.functions.toolSpecs
|
||||
})
|
||||
const message = res.choices?.[0]?.message
|
||||
console.log(message?.content)
|
||||
}
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/openai
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/openai/bin/weather.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [OpenAI Chat Completions API docs](https://platform.openai.com/docs/api-reference/chat/create)
|
||||
- [OpenAI Responses vs Chat Completions](https://platform.openai.com/docs/guides/responses-vs-chat-completions)
|
||||
- [Using OpenAI's Responses API with Agentic](/marketplace/ts-sdks/openai-responses)
|
|
@ -0,0 +1,125 @@
|
|||
---
|
||||
title: OpenAI Responses
|
||||
description: How to use Agentic tools with the OpenAI Responses API.
|
||||
---
|
||||
|
||||
<Tip>
|
||||
There's no need for an adapter with the OpenAI SDK since all agentic tools are
|
||||
compatible with OpenAI by default.
|
||||
</Tip>
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash bun
|
||||
bun add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add openai @agentic/platform-tool-client
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { assert } from '@agentic/core'
|
||||
import { AgenticToolClient } from '@agentic/platform-tool-client'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
async function main() {
|
||||
const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search')
|
||||
const openai = new OpenAI()
|
||||
|
||||
const messages: OpenAI.Responses.ResponseInput = [
|
||||
{
|
||||
role: 'system',
|
||||
content: 'You are a helpful assistant. Be as concise as possible.'
|
||||
},
|
||||
{ role: 'user', content: 'What is the weather in San Francisco?' }
|
||||
]
|
||||
|
||||
{
|
||||
// First call to OpenAI to invoke the tool
|
||||
const res = await openai.responses.create({
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: searchTool.functions.responsesToolSpecs,
|
||||
tool_choice: 'required',
|
||||
input: messages
|
||||
})
|
||||
|
||||
const toolCall = res.output[0]
|
||||
assert(toolCall?.type === 'function_call')
|
||||
const toolResult = await searchTool.callTool(
|
||||
toolCall.name,
|
||||
toolCall.arguments
|
||||
)
|
||||
|
||||
messages.push(toolCall)
|
||||
messages.push({
|
||||
type: 'function_call_output',
|
||||
call_id: toolCall.call_id,
|
||||
output: JSON.stringify(toolResult)
|
||||
})
|
||||
}
|
||||
|
||||
{
|
||||
// Second call to OpenAI to generate a text response
|
||||
const res = await openai.responses.create({
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: searchTool.functions.responsesToolSpecs,
|
||||
input: messages
|
||||
})
|
||||
|
||||
console.log(res.output_text)
|
||||
}
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/openai
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
The
|
||||
[`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search)
|
||||
tool comes with a generous free tier, but once that runs out, you'll need to
|
||||
sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
pnpm build
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ts-sdks/openai/bin/weather-responses.ts
|
||||
```
|
||||
|
||||
## Additional resources
|
||||
|
||||
- [OpenAI Chat Completions API docs](https://platform.openai.com/docs/api-reference/chat/create)
|
||||
- [OpenAI Responses vs Chat Completions](https://platform.openai.com/docs/guides/responses-vs-chat-completions)
|
||||
- [Using OpenAI's Chat Completion API with Agentic](/marketplace/ts-sdks/openai-chat)
|
|
@ -0,0 +1 @@
|
|||
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Firebase</title><path d="M19.455 8.369c-.538-.748-1.778-2.285-3.681-4.569-.826-.991-1.535-1.832-1.884-2.245a146 146 0 0 0-.488-.576l-.207-.245-.113-.133-.022-.032-.01-.005L12.57 0l-.609.488c-1.555 1.246-2.828 2.851-3.681 4.64-.523 1.064-.864 2.105-1.043 3.176-.047.241-.088.489-.121.738-.209-.017-.421-.028-.632-.033-.018-.001-.035-.002-.059-.003a7.46 7.46 0 0 0-2.28.274l-.317.089-.163.286c-.765 1.342-1.198 2.869-1.252 4.416-.07 2.01.477 3.954 1.583 5.625 1.082 1.633 2.61 2.882 4.42 3.611l.236.095.071.025.003-.001a9.59 9.59 0 0 0 2.941.568q.171.006.342.006c1.273 0 2.513-.249 3.69-.742l.008.004.313-.145a9.63 9.63 0 0 0 3.927-3.335c1.01-1.49 1.577-3.234 1.641-5.042.075-2.161-.643-4.304-2.133-6.371m-7.083 6.695c.328 1.244.264 2.44-.191 3.558-1.135-1.12-1.967-2.352-2.475-3.665-.543-1.404-.87-2.74-.974-3.975.48.157.922.366 1.315.622 1.132.737 1.914 1.902 2.325 3.461zm.207 6.022c.482.368.99.712 1.513 1.028-.771.21-1.565.302-2.369.273a8 8 0 0 1-.373-.022c.458-.394.869-.823 1.228-1.279zm1.347-6.431c-.516-1.957-1.527-3.437-3.002-4.398-.647-.421-1.385-.741-2.194-.95.011-.134.026-.268.043-.4.014-.113.03-.216.046-.313.133-.689.332-1.37.589-2.025.099-.25.206-.499.321-.74l.004-.008c.177-.358.376-.719.61-1.105l.092-.152-.003-.001c.544-.851 1.197-1.627 1.942-2.311l.288.341c.672.796 1.304 1.548 1.878 2.237 1.291 1.549 2.966 3.583 3.612 4.48 1.277 1.771 1.893 3.579 1.83 5.375-.049 1.395-.461 2.755-1.195 3.933-.694 1.116-1.661 2.05-2.8 2.708-.636-.318-1.559-.839-2.539-1.599.79-1.575.952-3.28.479-5.072zm-2.575 5.397c-.725.939-1.587 1.55-2.09 1.856-.081-.029-.163-.06-.243-.093l-.065-.026c-1.49-.616-2.747-1.656-3.635-3.01-.907-1.384-1.356-2.993-1.298-4.653.041-1.19.338-2.327.882-3.379.316-.07.638-.114.96-.131l.084-.002c.162-.003.324-.003.478 0 .227.011.454.035.677.07.073 1.513.445 3.145 1.105 4.852.637 1.644 1.694 3.162 3.144 4.515z"/></svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 1.9 KiB |
|
@ -0,0 +1 @@
|
|||
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>LangChain</title><path d="M6.0988 5.9175C2.7359 5.9175 0 8.6462 0 12s2.736 6.0825 6.0988 6.0825h11.8024C21.2641 18.0825 24 15.3538 24 12s-2.736-6.0825-6.0988-6.0825ZM5.9774 7.851c.493.0124 1.02.2496 1.273.6228.3673.4592.4778 1.0668.8944 1.4932.5604.6118 1.199 1.1505 1.7161 1.802.4892.5954.8386 1.2937 1.1436 1.9975.1244.2335.1257.5202.31.7197.0908.1204.5346.4483.4383.5645.0555.1204.4702.286.3263.4027-.1944.04-.4129.0476-.5616-.1074-.0549.126-.183.0596-.2819.0432a4 4 0 0 0-.025.0736c-.3288.0219-.5754-.3126-.732-.565-.3111-.168-.6642-.2702-.982-.446-.0182.2895.0452.6485-.231.8353-.014.5565.8436.0656.9222.4804-.061.0067-.1286-.0095-.1774.0373-.2239.2172-.4805-.1645-.7385-.007-.3464.174-.3808.3161-.8096.352-.0237-.0359-.0143-.0592.0059-.0811.1207-.1399.1295-.3046.3356-.3643-.2122-.0334-.3899.0833-.5686.1757-.2323.095-.2304-.2141-.5878.0164-.0396-.0322-.0208-.0615.0018-.0864.0908-.1107.2102-.127.345-.1208-.663-.3686-.9751.4507-1.2813.0432-.092.0243-.1265.1068-.1845.1652-.05-.0548-.0123-.1212-.0099-.1857-.0598-.028-.1356-.041-.1179-.1366-.1171-.0395-.1988.0295-.286.0952-.0787-.0608.0532-.1492.0776-.2125.0702-.1216.23-.025.3111-.1126.2306-.1308.552.0814.8155.0455.203.0255.4544-.1825.3526-.39-.2171-.2767-.179-.6386-.1839-.9695-.0268-.1929-.491-.4382-.6252-.6462-.1659-.1873-.295-.4047-.4243-.6182-.4666-.9008-.3198-2.0584-.9077-2.8947-.266.1466-.6125.0774-.8418-.119-.1238.1125-.1292.2598-.139.4161-.297-.2962-.2593-.8559-.022-1.1855.0969-.1302.2127-.2373.342-.3316.0292-.0213.0391-.0419.0385-.0747.1174-.5267.5764-.7391 1.0694-.7267m12.4071.46c.5575 0 1.0806.2159 1.474.6082s.61.9145.61 1.4704c0 .556-.2167 1.078-.61 1.4698v.0006l-.902.8995a2.08 2.08 0 0 1-.8597.5166l-.0164.0047-.0058.0164a2.05 2.05 0 0 1-.474.7308l-.9018.8995c-.3934.3924-.917.6083-1.4745.6083s-1.0806-.216-1.474-.6083c-.813-.8107-.813-2.1294 0-2.9402l.9019-.8995a2.056 2.056 0 0 1 .858-.5143l.017-.0053.0058-.0158a2.07 2.07 0 0 1 
.4752-.7337l.9018-.8995c.3934-.3924.9171-.6083 1.4745-.6083zm0 .8965a1.18 1.18 0 0 0-.8388.3462l-.9018.8995a1.181 1.181 0 0 0-.3427.9252l.0053.0572c.0323.2652.149.5044.3374.6917.13.1296.2733.2114.4471.2686a.9.9 0 0 1 .014.1582.884.884 0 0 1-.2609.6304l-.0554.0554c-.3013-.1028-.5525-.253-.7794-.4792a2.06 2.06 0 0 1-.5761-1.0968l-.0099-.0578-.0461.0368a1.1 1.1 0 0 0-.0876.0794l-.9024.8995c-.4623.461-.4623 1.212 0 1.673.2311.2305.535.346.8394.3461.3043 0 .6077-.1156.8388-.3462l.9019-.8995c.4623-.461.4623-1.2113 0-1.673a1.17 1.17 0 0 0-.4367-.2749 1 1 0 0 1-.014-.1611c0-.2591.1023-.505.2901-.6923.3019.1028.57.2694.7962.495.3007.2999.4994.679.5756 1.0968l.0105.0578.0455-.0373a1.1 1.1 0 0 0 .0887-.0794l.902-.8996c.4622-.461.4628-1.2124 0-1.6735a1.18 1.18 0 0 0-.8395-.3462Zm-9.973 5.1567-.0006.0006c-.0793.3078-.1048.8318-.506.847-.033.1776.1228.2445.2655.1874.141-.0645.2081.0508.2557.1657.2177.0317.5394-.0725.5516-.3298-.325-.1867-.4253-.5418-.5662-.8709"/></svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 2.9 KiB |
|
@ -0,0 +1,12 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 121 120" height="120" width="121">
|
||||
<path fill="black" d="M0 24C0 10.7452 10.7539 0 24.0194 0H96.0777C109.343 0 120.097 10.7452 120.097 24V96C120.097 109.255 109.343 120 96.0777 120H24.0194C10.7539 120 0 109.255 0 96V24Z"></path>
|
||||
<path fill="url(#paint0_linear_3021_4220)" d="M75.4637 78.9301C67.7328 82.3428 59.3577 80.9446 56.1365 79.8188C56.1365 80.5891 56.1014 82.9827 55.9608 86.3954C55.8203 89.8082 54.7309 91.9648 54.2038 92.6166C54.2624 94.7495 54.3093 99.4065 54.0281 100.971C53.747 102.535 52.7396 103.874 52.2711 104.348H47.5272C47.9488 102.215 49.577 100.852 50.3384 100.437C50.7601 96.0293 49.9284 92.2018 49.4599 90.8391C48.9913 92.4981 47.8434 96.3493 47.0001 98.4822C46.1567 100.615 44.8916 102.57 44.3645 103.281H40.8505C40.6748 101.148 41.8461 100.437 42.6075 100.437C42.9589 99.7857 43.8023 97.7357 44.3645 94.7495C44.9268 91.7634 44.1303 86.1584 43.6617 83.7292V76.0861C38.0393 73.0644 35.9308 70.0427 34.5252 66.6656C33.4007 63.9638 33.7053 59.8519 33.9981 58.1337C33.6467 57.482 32.6222 55.8256 32.2411 53.5123C31.714 50.3128 32.0068 48.0021 32.2411 46.7579C31.8897 46.4024 31.1869 44.5894 31.1869 40.1813C31.1869 35.7732 32.4753 33.4861 33.1196 32.8937V30.9384C30.6597 30.7607 28.1999 29.6942 26.7943 28.2722C25.3887 26.8503 26.4429 24.7173 27.3214 24.0063C28.1999 23.2953 29.0784 23.8286 30.3083 23.4731C31.5383 23.1176 32.5925 22.7621 33.1196 21.6956C33.5413 20.8424 32.7096 17.3112 32.2411 15.6522C34.3495 15.9366 35.6965 17.7852 36.1065 18.6739V15.6522C38.742 16.8964 43.486 19.9181 45.0673 26.4948C46.3324 31.7561 47.2343 42.7883 47.5271 47.6467C54.2624 47.706 62.8132 46.6816 70.5441 48.3576C77.5721 49.8813 80.7348 52.9791 84.4245 52.9791C88.1143 52.9791 90.2227 50.8461 92.8582 52.6236C95.4937 54.401 96.8994 59.378 96.548 63.1106C96.2668 66.0968 93.971 67.0803 92.8582 67.1988C91.4526 71.8913 92.8582 76.3824 93.7367 78.0414V84.7957C94.1467 85.3882 94.9666 87.2131 94.9666 89.7727C94.9666 92.3322 94.1467 94.0386 93.7367 94.5718C94.4395 98.5534 93.4439 102.63 92.8582 104.17H88.1143C88.6765 102.748 89.637 102.393 90.047 102.393C90.8904 97.9846 90.2812 93.9201 89.8713 92.4389C87.2006 90.8747 85.4787 88.1137 84.9516 86.9287C85.0102 87.9359 84.8462 90.7325 83.7217 93.8608C82.5972 96.9892 80.9105 
98.8378 80.2077 99.371V103.104H75.4637C75.4637 100.829 76.7522 100.378 77.3964 100.437C78.2164 98.9563 80.2077 96.7048 80.2077 92.2611C80.2077 88.5115 77.5721 86.751 75.6394 83.3738C74.7212 81.7692 75.1709 79.7596 75.4637 78.9301Z"></path>
|
||||
<defs>
|
||||
<linearGradient gradientUnits="userSpaceOnUse" y2="86.8919" x2="107.83" y1="23.1176" x1="31.7318" id="paint0_linear_3021_4220">
|
||||
<stop stop-color="#F6DCD9" offset="0.0619804"></stop>
|
||||
<stop stop-color="#FFA5EA" offset="0.325677"></stop>
|
||||
<stop stop-color="#45DFF8" offset="0.589257"></stop>
|
||||
<stop stop-color="#BC8DEB" offset="1"></stop>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
</svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 2.8 KiB |
|
@ -0,0 +1,8 @@
|
|||
<svg width="164" height="160" viewBox="0 0 164 160" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M81.9098 158.773C125.344 158.773 160.554 123.504 160.554 79.9978C160.554 36.4915 125.344 1.22266 81.9098 1.22266C38.4758 1.22266 3.26562 36.4915 3.26562 79.9978C3.26562 123.504 38.4758 158.773 81.9098 158.773Z" stroke="black" stroke-width="1.16026"/>
|
||||
<path d="M44.6109 117.361C75.3234 148.124 116.92 156.336 137.519 135.703C158.118 115.069 149.92 73.4035 119.207 42.6399C88.495 11.8763 46.8986 3.66431 26.2994 24.2979C5.70011 44.9314 13.8985 86.5971 44.6109 117.361Z" stroke="black" stroke-width="1.16026"/>
|
||||
<path d="M49.8027 80.2852H114.433" stroke="black" stroke-width="1.16026"/>
|
||||
<path d="M65.7012 96.7272L98.5383 63.8359" stroke="black" stroke-width="1.16026"/>
|
||||
<path d="M98.5319 96.7272L65.6953 63.8359" stroke="black" stroke-width="1.16026"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M28.1029 45.3313C14.4524 54.5021 6.50238 66.8214 6.50238 79.9996C6.50238 93.1772 14.4524 105.496 28.1029 114.667C41.7327 123.824 60.7469 129.593 81.9101 129.593C103.073 129.593 122.087 123.824 135.717 114.667C149.367 105.496 157.317 93.1772 157.317 79.9996C157.317 66.8214 149.367 54.5021 135.717 45.3313C122.087 36.1745 103.073 30.4057 81.9101 30.4057C60.7469 30.4057 41.7327 36.1745 28.1029 45.3313ZM24.4973 39.9466C39.3312 29.9808 59.6389 23.9219 81.9101 23.9219C104.181 23.9219 124.489 29.9808 139.322 39.9466C154.136 49.8986 163.791 63.9971 163.791 79.9996C163.791 96.0015 154.136 110.1 139.322 120.052C124.489 130.018 104.181 136.077 81.9101 136.077C59.6389 136.077 39.3312 130.018 24.4973 120.052C9.68433 110.1 0.0292969 96.0015 0.0292969 79.9996C0.0292969 63.9971 9.68433 49.8986 24.4973 39.9466Z" fill="black"/>
|
||||
</svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 1.7 KiB |
|
@ -0,0 +1,3 @@
|
|||
<svg width="485" height="481" viewBox="0 0 485 481" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M186.246 175.614V130.031C186.246 126.192 187.687 123.312 191.044 121.395L282.692 68.6149C295.167 61.4179 310.042 58.0609 325.394 58.0609C382.971 58.0609 419.44 102.685 419.44 150.185C419.44 153.543 419.44 157.382 418.959 161.221L323.954 105.561C318.197 102.204 312.437 102.204 306.68 105.561L186.246 175.614ZM400.245 353.148V244.227C400.245 237.508 397.364 232.71 391.608 229.352L271.174 159.299L310.519 136.746C313.877 134.829 316.757 134.829 320.115 136.746L411.762 189.526C438.154 204.882 455.905 237.508 455.905 269.174C455.905 305.639 434.315 339.228 400.245 353.144V353.148ZM157.937 257.185L118.592 234.155C115.235 232.238 113.794 229.357 113.794 225.518V119.959C113.794 68.6199 153.139 29.7519 206.4 29.7519C226.555 29.7519 245.264 36.4709 261.102 48.4659L166.578 103.167C160.822 106.524 157.942 111.322 157.942 118.042V257.189L157.937 257.185ZM242.626 306.125L186.246 274.458V207.286L242.626 175.619L299.002 207.286V274.458L242.626 306.125ZM278.852 451.992C258.698 451.992 239.989 445.273 224.151 433.279L318.674 378.577C324.431 375.22 327.311 370.422 327.311 363.702V224.555L367.138 247.585C370.495 249.502 371.936 252.382 371.936 256.222V361.781C371.936 413.12 332.109 451.988 278.852 451.988V451.992ZM165.134 344.993L73.4856 292.214C47.0936 276.857 29.3426 244.232 29.3426 212.565C29.3426 175.619 51.4146 142.512 85.4796 128.596V237.994C85.4796 244.713 88.3606 249.511 94.1166 252.869L214.074 322.44L174.729 344.993C171.372 346.91 168.491 346.91 165.134 344.993ZM159.859 423.683C105.639 423.683 65.8126 382.898 65.8126 332.517C65.8126 328.678 66.2936 324.839 66.7706 321L161.295 375.701C167.051 379.059 172.812 379.059 178.568 375.701L299.002 306.13V351.713C299.002 355.552 297.562 358.432 294.204 360.349L202.557 413.129C190.081 420.326 175.206 423.683 159.854 423.683H159.859ZM278.852 480.779C336.911 480.779 385.37 439.516 396.41 384.815C450.149 370.899 484.696 320.518 484.696 269.179C484.696 235.59 470.303 202.965 444.392 179.453C446.791 169.376 448.231 159.299 448.231 
149.227C448.231 80.6139 392.571 29.2699 328.274 29.2699C315.322 29.2699 302.846 31.1869 290.37 35.5079C268.775 14.3949 239.026 0.960938 206.4 0.960938C148.342 0.960938 99.8826 42.2229 88.8426 96.9239C35.1036 110.84 0.556641 161.221 0.556641 212.56C0.556641 246.149 14.9496 278.774 40.8606 302.286C38.4616 312.363 37.0216 322.44 37.0216 332.513C37.0216 401.126 92.6816 452.469 156.978 452.469C169.931 452.469 182.407 450.552 194.883 446.231C216.473 467.344 246.222 480.779 278.852 480.779Z" fill="black"/>
|
||||
</svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 2.5 KiB |
|
@ -0,0 +1 @@
|
|||
<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Vercel</title><path d="m12 1.608 12 20.784H0Z"/></svg>
|
Po Szerokość: | Wysokość: | Rozmiar: 132 B |
|
@ -5,11 +5,11 @@ description: Configuring your Agentic project.
|
|||
|
||||
Every Agentic project needs a config file (`agentic.config.ts`, `agentic.config.js`, or `agentic.config.json`) to define the project's metadata, pricing, rate-limits, and any tool-specific behavior overrides.
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Configuring your project can feel a little overwhelming. Feel free to [reach
|
||||
out to us](/contact) if you're considering using Agentic's MCP Gateway, and
|
||||
I'd be happy to help walk you through setting your product up for success.
|
||||
</Note>
|
||||
</Tip>
|
||||
|
||||
## Config Fields
|
||||
|
||||
|
@ -162,8 +162,8 @@ See [Tool Config](/publishing/config/tool-config) for details.
|
|||
|
||||
## Config Help
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Configuring your project can feel a little overwhelming. Feel free to [reach
|
||||
out to us](/contact) if you're considering using Agentic's MCP Gateway, and
|
||||
I'd be happy to help walk you through setting your product up for success.
|
||||
</Note>
|
||||
</Tip>
|
||||
|
|
|
@ -144,9 +144,9 @@ See [Rate Limits](/publishing/config/rate-limits) for details.
|
|||
|
||||
## Config Help
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Configuring your project can feel a little overwhelming with the amount of
|
||||
options available. Feel free to [reach out to us](/contact) if you're
|
||||
considering using Agentic's MCP Gateway, and I'd be happy to help walk you
|
||||
through setting your product up for success.
|
||||
</Note>
|
||||
</Tip>
|
||||
|
|
|
@ -60,6 +60,8 @@ agentic signup -e <email> -p <password> -u <username>
|
|||
URL and that the URL supports the Streamable HTTP transport.
|
||||
</Info>
|
||||
|
||||
Your agentic config can either be an `agentic.config.ts` file or an `agentic.config.json` file. The advantage of using a `ts` file is that you get full autocomplete and type safety.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="TypeScript">
|
||||
|
||||
|
@ -136,8 +138,8 @@ Every time you make a change to your project, you can run `agentic deploy` which
|
|||
</Info>
|
||||
|
||||
<Note>
|
||||
The returned deployment will not have any information about the origin server,
|
||||
because your origin server is considered hidden once deployed to Agentic's MCP
|
||||
The returned deployment will not have any information about your origin
|
||||
server, because the origin is considered hidden once deployed to Agentic's MCP
|
||||
gateway.
|
||||
</Note>
|
||||
|
||||
|
@ -323,7 +325,7 @@ curl -X POST -H "Content-Type: application/json" -d '{ "query": "example google
|
|||
<Tab title="HTTPie">
|
||||
|
||||
```bash
|
||||
http -j https://gateway.agentic.com/mcp/search/search query='example google search'
|
||||
http https://gateway.agentic.com/mcp/search/search query='example google search'
|
||||
```
|
||||
|
||||
</Tab>
|
||||
|
@ -346,7 +348,7 @@ Now, your project will be available at `https://agentic.so/marketplace/projects/
|
|||
|
||||
You can share your product's public URL with customers, and they'll be able to subscribe to your product via Stripe. You can visit your [dashboard](https://agentic.so/app) to track customer usage and revenue.
|
||||
|
||||
<Note>Congrats, you now have a live MCP product! 🎉</Note>
|
||||
<Tip>Congrats, you now have a live MCP product! 🎉</Tip>
|
||||
|
||||
## 7. (Optional) Submit your product to the public Agentic Marketplace
|
||||
|
||||
|
|
|
@ -60,6 +60,8 @@ agentic signup -e <email> -p <password> -u <username>
|
|||
`https` URL, and that your OpenAPI spec is a valid 3.0 or 3.1 spec.
|
||||
</Info>
|
||||
|
||||
Your agentic config can either be an `agentic.config.ts` file or an `agentic.config.json` file. The advantage of using a `ts` file is that you get full autocomplete and type safety.
|
||||
|
||||
<Tabs>
|
||||
<Tab title="TypeScript">
|
||||
|
||||
|
@ -137,11 +139,17 @@ Every time you make a change to your project, you can run `agentic deploy` which
|
|||
prioritize this feature.
|
||||
</Info>
|
||||
|
||||
<Note>
|
||||
The returned deployment will not have any information about your origin
|
||||
server, because the origin is considered hidden once deployed to Agentic's MCP
|
||||
gateway.
|
||||
</Note>
|
||||
|
||||
<Expandable title="example output">
|
||||
|
||||
<Note>
|
||||
The returned deployment will not have any information about the origin server,
|
||||
because your origin server is considered hidden once deployed to Agentic's MCP
|
||||
The returned deployment will not have any information about your origin
|
||||
server, because the origin is considered hidden once deployed to Agentic's MCP
|
||||
gateway.
|
||||
</Note>
|
||||
|
||||
|
@ -325,7 +333,7 @@ curl -X POST -H "Content-Type: application/json" -d '{ "query": "example google
|
|||
<Tab title="HTTPie">
|
||||
|
||||
```bash
|
||||
http -j https://gateway.agentic.com/mcp/search/search query='example google search'
|
||||
http https://gateway.agentic.com/mcp/search/search query='example google search'
|
||||
```
|
||||
|
||||
</Tab>
|
||||
|
@ -348,7 +356,7 @@ Now, your project will be available at `https://agentic.so/marketplace/projects/
|
|||
|
||||
You can share your product's public URL with customers, and they'll be able to subscribe to your product via Stripe. You can visit your [dashboard](https://agentic.so/app) to track customer usage and revenue.
|
||||
|
||||
<Note>Congrats, you now have a live MCP product! 🎉</Note>
|
||||
<Tip>Congrats, you now have a live MCP product! 🎉</Tip>
|
||||
|
||||
## 7. (Optional) Submit your product to the public Agentic Marketplace
|
||||
|
||||
|
|
|
@ -80,11 +80,11 @@ app.all('/mcp', async (c) => {
|
|||
serve({ fetch: app.fetch, port: 8787 })
|
||||
```
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Hono is really flexible, so if you'd rather deploy your server to Cloudflare
|
||||
Workers instead of using Node.js (or any other platform), just follow [Hono's
|
||||
docs](https://hono.dev/docs/getting-started/basic).
|
||||
</Note>
|
||||
</Tip>
|
||||
|
||||
## 3. Deploy your MCP server remotely
|
||||
|
||||
|
|
|
@ -87,11 +87,11 @@ serve({ fetch: app.fetch, port: 8787 })
|
|||
|
||||
Note that the auto-generated OpenAPI spec will be available at `/docs` in this example.
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Hono is really flexible, so if you'd rather deploy your server to Cloudflare
|
||||
Workers instead of using Node.js (or any other platform), just follow [Hono's
|
||||
docs](https://hono.dev/docs/getting-started/basic).
|
||||
</Note>
|
||||
</Tip>
|
||||
|
||||
## 3. Deploy your OpenAPI server remotely
|
||||
|
||||
|
|
|
@ -9,13 +9,13 @@ description: Configuring your origin MCP server or OpenAPI service with Agentic'
|
|||
interested in hosting your origin server with Agentic's infrastructure, please
|
||||
[reach out to us](/contact) and we'll be happy to help you get set up.
|
||||
|
||||
<Note>
|
||||
<Tip>
|
||||
Remote origin servers are important because they allow for maximum flexibility
|
||||
with how you author and host your MCP server or OpenAPI service.
|
||||
|
||||
By cleanly separating between Agentic's MCP gateway and your remote origin server, Agentic supports origin servers written in any language or framework and deployed to any cloud.
|
||||
|
||||
</Note>
|
||||
</Tip>
|
||||
|
||||
### Remote Origin MCP Server
|
||||
|
||||
|
|
|
@ -1,92 +0,0 @@
|
|||
---
|
||||
title: Vercel AI SDK
|
||||
description: Agentic adapter for the Vercel AI SDK.
|
||||
---
|
||||
|
||||
- package: `@agentic/ai-sdk`
|
||||
- exports: `function createAISDKTools`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/ai-sdk/src/ai-sdk.ts)
|
||||
- [Vercel AI SDK docs](https://sdk.vercel.ai)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/ai-sdk ai
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/ai-sdk ai
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/ai-sdk ai
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createAISDKTools } from '@agentic/ai-sdk'
|
||||
import { WeatherClient } from '@agentic/weather'
|
||||
import { openai } from '@ai-sdk/openai'
|
||||
import { generateText } from 'ai'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const result = await generateText({
|
||||
model: openai('gpt-4o-mini'),
|
||||
tools: createAISDKTools(weather),
|
||||
toolChoice: 'required',
|
||||
temperature: 0,
|
||||
system: 'You are a helpful assistant. Be as concise as possible.',
|
||||
prompt: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(result.toolResults[0])
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
Note that this example snippet also requires you to install the AI SDK's OpenAI provider, the Agentic weather tool, and `dotenv`.
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/ai-sdk/bin/weather.ts
|
||||
```
|
|
@ -1,75 +0,0 @@
|
|||
---
|
||||
title: Dexter
|
||||
description: Agentic adapter for the Dexa Dexter SDK.
|
||||
---
|
||||
|
||||
- package: `@agentic/dexter`
|
||||
- exports: `function createDexterFunctions`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/dexter/src/dexter.ts)
|
||||
- [Dexa Dexter SDK docs](https://dexter.dexa.ai)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/dexter @dexaai/dexter
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/dexter @dexaai/dexter
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/dexter @dexaai/dexter
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createDexterFunctions } from '@agentic/dexter'
|
||||
import { WeatherClient } from '@agentic/weather'
|
||||
import { ChatModel, createAIRunner } from '@dexaai/dexter'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const runner = createAIRunner({
|
||||
chatModel: new ChatModel({
|
||||
params: { model: 'gpt-4o-mini', temperature: 0 }
|
||||
// debug: true
|
||||
}),
|
||||
functions: createDexterFunctions(weather),
|
||||
systemMessage: 'You are a helpful assistant. Be as concise as possible.'
|
||||
})
|
||||
|
||||
const result = await runner('What is the weather in San Francisco?')
|
||||
console.log(result)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/dexter/bin/weather.ts
|
||||
```
|
|
@ -1,79 +0,0 @@
|
|||
---
|
||||
title: Genkit
|
||||
description: Agentic adapter for the Firebase Genkit SDK.
|
||||
---
|
||||
|
||||
- package: `@agentic/genkit`
|
||||
- exports: `function createGenkitTools`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/genkit/src/genkit.ts)
|
||||
- [Firebase Genkit docs](https://firebase.google.com/docs/genkit)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/genkit genkit
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/genkit genkit
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/genkit genkit
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
This example also requires you to install the [genkitx-openai](https://github.com/TheFireCo/genkit-plugins/tree/main/plugins/openai) package, which adds support for OpenAI to Genkit.
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createGenkitTools } from '@agentic/genkit'
|
||||
import { WeatherClient } from '@agentic/stdlib'
|
||||
import { genkit } from 'genkit'
|
||||
import { gpt4oMini, openAI } from 'genkitx-openai'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [openAI()]
|
||||
})
|
||||
|
||||
const result = await ai.generate({
|
||||
model: gpt4oMini,
|
||||
tools: createGenkitTools(ai, weather),
|
||||
system: 'You are a helpful assistant. Be as concise as possible.',
|
||||
prompt: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(result)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/genkit/bin/weather.ts
|
||||
```
|
|
@ -1,87 +0,0 @@
|
|||
---
|
||||
title: LangChain
|
||||
description: Agentic adapter for the LangChain JS SDK.
|
||||
---
|
||||
|
||||
- package: `@agentic/langchain`
|
||||
- exports: `function createLangChainTools`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/langchain/src/langchain.ts)
|
||||
- [LangChain JS docs](https://js.langchain.com)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/langchain @langchain/core langchain
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/langchain @langchain/core langchain
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/langchain @langchain/core langchain
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import { createLangChainTools } from '@agentic/langchain'
|
||||
import { WeatherClient } from '@agentic/stdlib'
|
||||
import { ChatPromptTemplate } from '@langchain/core/prompts'
|
||||
import { ChatOpenAI } from '@langchain/openai'
|
||||
import { AgentExecutor, createToolCallingAgent } from 'langchain/agents'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const tools = createLangChainTools(weather)
|
||||
const agent = createToolCallingAgent({
|
||||
llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }),
|
||||
tools,
|
||||
prompt: ChatPromptTemplate.fromMessages([
|
||||
['system', 'You are a helpful assistant. Be as concise as possible.'],
|
||||
['placeholder', '{chat_history}'],
|
||||
['human', '{input}'],
|
||||
['placeholder', '{agent_scratchpad}']
|
||||
])
|
||||
})
|
||||
|
||||
const agentExecutor = new AgentExecutor({
|
||||
agent,
|
||||
tools
|
||||
// verbose: true
|
||||
})
|
||||
|
||||
const result = await agentExecutor.invoke({
|
||||
input: 'What is the weather in San Francisco?'
|
||||
})
|
||||
|
||||
console.log(result.output)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/langchain/bin/weather.ts
|
||||
```
|
|
@ -1,78 +0,0 @@
|
|||
---
|
||||
title: LlamaIndex
|
||||
description: Agentic adapter for the LlamaIndex TS SDK.
|
||||
---
|
||||
|
||||
- package: `@agentic/llamaindex`
|
||||
- exports: `function createLlamaIndexTools`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/llamaindex/src/llamaindex.ts)
|
||||
- [LlamaIndex TS docs](https://ts.llamaindex.ai)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createLlamaIndexTools } from '@agentic/llamaindex'
|
||||
import { WeatherClient } from '@agentic/stdlib'
|
||||
import { openai } from '@llamaindex/openai'
|
||||
import { agent } from '@llamaindex/workflow'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const tools = createLlamaIndexTools(weather)
|
||||
const weatherAgent = agent({
|
||||
name: 'Weather Agent',
|
||||
llm: openai({ model: 'gpt-4o-mini', temperature: 0 }),
|
||||
systemPrompt: 'You are a helpful assistant. Be as concise as possible.',
|
||||
tools
|
||||
})
|
||||
|
||||
const response = await weatherAgent.run(
|
||||
'What is the weather in San Francisco?'
|
||||
)
|
||||
|
||||
console.log(response.data.result)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/llamaindex/bin/weather.ts
|
||||
```
|
|
@ -1,93 +0,0 @@
|
|||
---
|
||||
title: Mastra
|
||||
description: Agentic adapter for the Mastra AI Agent framework.
|
||||
---
|
||||
|
||||
- package: `@agentic/mastra`
|
||||
- exports: `function createMastraTools`
|
||||
- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/mastra/src/mastra.ts)
|
||||
- [Mastra docs](https://mastra.ai/docs)
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/mastra @mastra/core
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/mastra @mastra/core
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/mastra @mastra/core
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { createMastraTools } from '@agentic/mastra'
|
||||
import { WeatherClient } from '@agentic/weather'
|
||||
import { openai } from '@ai-sdk/openai'
|
||||
import { Agent } from '@mastra/core/agent'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
|
||||
const weatherAgent = new Agent({
|
||||
name: 'Weather Agent',
|
||||
instructions: 'You are a helpful assistant. Be as concise as possible.',
|
||||
model: openai('gpt-4o-mini'),
|
||||
tools: createMastraTools(weather)
|
||||
})
|
||||
|
||||
const res = await weatherAgent.generate(
|
||||
'What is the weather in San Francisco?'
|
||||
)
|
||||
console.log(res.text)
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
Note that this example snippet also requires you to install the AI SDK's OpenAI provider, the Agentic weather tool, and `dotenv`.
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @ai-sdk/openai @agentic/weather dotenv
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/mastra/bin/weather.ts
|
||||
```
|
|
@ -1,156 +0,0 @@
|
|||
---
|
||||
title: OpenAI
|
||||
description: How to use Agentic with the OpenAI TS SDK directly.
|
||||
---
|
||||
|
||||
<Note>
|
||||
There's no need for an adapter with the OpenAI SDK since all agentic tools are
|
||||
compatible with OpenAI by default. You can use `AIFunctionSet.specs` for
|
||||
function calling or `AIFunctionSet.toolSpecs` for parallel tool calling.
|
||||
</Note>
|
||||
|
||||
## Install
|
||||
|
||||
<CodeGroup>
|
||||
```bash npm
|
||||
npm install @agentic/stdlib openai
|
||||
```
|
||||
|
||||
```bash yarn
|
||||
yarn add @agentic/stdlib openai
|
||||
```
|
||||
|
||||
```bash pnpm
|
||||
pnpm add @agentic/stdlib openai
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
## Usage
|
||||
|
||||
```ts
|
||||
import { WeatherClient } from '@agentic/stdlib'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
const weather = new WeatherClient()
|
||||
const openai = new OpenAI()
|
||||
|
||||
const messages: OpenAI.ChatCompletionMessageParam[] = [
|
||||
{
|
||||
role: 'system',
|
||||
content: 'You are a helpful assistant. Be as concise as possible.'
|
||||
},
|
||||
{ role: 'user', content: 'What is the weather in San Francisco?' }
|
||||
]
|
||||
|
||||
const res = await openai.chat.completions.create({
|
||||
messages,
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: weather.functions.toolSpecs,
|
||||
tool_choice: 'required'
|
||||
})
|
||||
const message = res.choices[0]?.message!
|
||||
console.log(JSON.stringify(message, null, 2))
|
||||
assert(message.tool_calls?.[0]?.function?.name === 'get_current_weather')
|
||||
|
||||
const fn = weather.functions.get('get_current_weather')!
|
||||
|
||||
const toolArgs = message.tool_calls[0].function.arguments
|
||||
const toolResult = await fn(toolArgs)
|
||||
console.log(JSON.stringify(toolResult, null, 2))
|
||||
```
|
||||
|
||||
### Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/openai/bin/weather.ts
|
||||
```
|
||||
|
||||
## OpenAI Responses API
|
||||
|
||||
Agentic also supports the [OpenAI Responses API](https://platform.openai.com/docs/api-reference/responses) by referencing `AIFunctionSet.responsesToolSpecs` as in this example:
|
||||
|
||||
```ts
|
||||
import 'dotenv/config'
|
||||
|
||||
import { WeatherClient } from '@agentic/stdlib'
|
||||
import OpenAI from 'openai'
|
||||
|
||||
async function main() {
|
||||
const weather = new WeatherClient()
|
||||
const openai = new OpenAI()
|
||||
|
||||
const messages: OpenAI.Responses.ResponseInput = [
|
||||
{
|
||||
role: 'system',
|
||||
content: 'You are a helpful assistant. Be as concise as possible.'
|
||||
},
|
||||
{ role: 'user', content: 'What is the weather in San Francisco?' }
|
||||
]
|
||||
|
||||
{
|
||||
// Call to OpenAI to invoke the weather tool
|
||||
const res = await openai.responses.create({
|
||||
model: 'gpt-4o-mini',
|
||||
temperature: 0,
|
||||
tools: weather.functions.responsesToolSpecs,
|
||||
tool_choice: 'required',
|
||||
input: messages
|
||||
})
|
||||
|
||||
const message = res.output[0]
|
||||
console.log(JSON.stringify(message, null, 2))
|
||||
assert(message?.type === 'function_call')
|
||||
assert(message.name === 'get_current_weather')
|
||||
|
||||
const fn = weather.functions.get('get_current_weather')!
|
||||
const toolResult = await fn(message.arguments)
|
||||
|
||||
messages.push(message)
|
||||
messages.push({
|
||||
type: 'function_call_output',
|
||||
call_id: message.call_id,
|
||||
output: JSON.stringify(toolResult)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
await main()
|
||||
```
|
||||
|
||||
### Running this example
|
||||
|
||||
<Info>
|
||||
You'll need a free API key from [weatherapi.com](https://www.weatherapi.com)
|
||||
to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`.
|
||||
</Info>
|
||||
|
||||
<Info>
|
||||
You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart)
|
||||
to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`.
|
||||
</Info>
|
||||
|
||||
```sh
|
||||
git clone git@github.com:transitive-bullshit/agentic.git
|
||||
cd agentic
|
||||
pnpm install
|
||||
echo 'WEATHER_API_KEY=your-key' >> .env
|
||||
echo 'OPENAI_API_KEY=your-key' >> .env
|
||||
npx tsx examples/openai/bin/weather-responses.ts
|
||||
```
|
|
@ -28,7 +28,14 @@ export async function resolveMCPOriginAdapter({
|
|||
)
|
||||
const transport = new StreamableHTTPClientTransport(new URL(origin.url))
|
||||
const client = new McpClient({ name, version })
|
||||
await client.connect(transport)
|
||||
try {
|
||||
await client.connect(transport)
|
||||
} catch (err: any) {
|
||||
throw new Error(
|
||||
`Failed to connect to MCP server at ${origin.url} using the Streamable HTTP transport.Make sure your MCP server is running and accessible, and that your URL is using the correct path (/, /mcp, etc): ${err.message}`,
|
||||
{ cause: err }
|
||||
)
|
||||
}
|
||||
|
||||
const serverInfo = {
|
||||
name,
|
||||
|
|
|
@ -31,7 +31,8 @@
|
|||
"@agentic/platform-core": "workspace:*",
|
||||
"@agentic/platform-types": "workspace:*",
|
||||
"@agentic/platform-validators": "workspace:*",
|
||||
"ky": "catalog:"
|
||||
"ky": "catalog:",
|
||||
"zod": "catalog:"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public",
|
||||
|
|
|
@ -3,7 +3,8 @@ import {
|
|||
AIFunctionSet,
|
||||
AIFunctionsProvider,
|
||||
createAIFunction,
|
||||
createJsonSchema
|
||||
createJsonSchema,
|
||||
getEnv
|
||||
} from '@agentic/core'
|
||||
import { AgenticApiClient } from '@agentic/platform-api-client'
|
||||
import { assert } from '@agentic/platform-core'
|
||||
|
@ -11,13 +12,36 @@ import { parseDeploymentIdentifier } from '@agentic/platform-validators'
|
|||
import defaultKy, { type KyInstance } from 'ky'
|
||||
|
||||
export type AgenticToolClientOptions = {
|
||||
/**
|
||||
* Optional API key for your subscription to the Agentic project.
|
||||
*
|
||||
* If not set, will default to the `AGENTIC_API_KEY` environment variable.
|
||||
*
|
||||
* If no `apiKey` is set, the client will make unauthenticated tool calls,
|
||||
* which may or may not be supported by the target Agentic project.
|
||||
*/
|
||||
apiKey?: string
|
||||
|
||||
/**
|
||||
* Optional custom Agentic API client.
|
||||
*/
|
||||
agenticApiClient?: AgenticApiClient
|
||||
|
||||
/**
|
||||
* Optional custom Agentic Gateway base URL.
|
||||
*
|
||||
* @default `https://gateway.agentic.so`
|
||||
*/
|
||||
agenticGatewayBaseUrl?: string
|
||||
|
||||
/**
|
||||
* Optional custom Ky instance.
|
||||
*
|
||||
* Useful for overriding the default headers, retry logic, etc.
|
||||
*/
|
||||
ky?: KyInstance
|
||||
}
|
||||
|
||||
// TODO: add support for optional apiKey
|
||||
|
||||
/**
|
||||
* Agentic tool client which makes it easy to use an Agentic tool products with
|
||||
* all of the major TypeScript LLM SDKs, without having to go through any MCP
|
||||
|
@ -32,18 +56,21 @@ export type AgenticToolClientOptions = {
|
|||
* ```
|
||||
*/
|
||||
export class AgenticToolClient extends AIFunctionsProvider {
|
||||
readonly apiKey: string | undefined
|
||||
readonly project: Project
|
||||
readonly deployment: Deployment
|
||||
readonly agenticGatewayBaseUrl: string
|
||||
readonly ky: KyInstance
|
||||
|
||||
protected constructor({
|
||||
apiKey,
|
||||
project,
|
||||
deployment,
|
||||
deploymentIdentifier,
|
||||
agenticGatewayBaseUrl,
|
||||
ky
|
||||
}: {
|
||||
apiKey: string | undefined
|
||||
project: Project
|
||||
deployment: Deployment
|
||||
deploymentIdentifier: string
|
||||
|
@ -52,10 +79,13 @@ export class AgenticToolClient extends AIFunctionsProvider {
|
|||
}) {
|
||||
super()
|
||||
|
||||
this.apiKey = apiKey
|
||||
this.project = project
|
||||
this.deployment = deployment
|
||||
this.agenticGatewayBaseUrl = agenticGatewayBaseUrl
|
||||
this.ky = ky
|
||||
this.ky = apiKey
|
||||
? ky.extend({ headers: { Authorization: `Bearer ${apiKey}` } })
|
||||
: ky
|
||||
|
||||
this._functions = new AIFunctionSet(
|
||||
deployment.tools.map((tool) => {
|
||||
|
@ -106,6 +136,7 @@ export class AgenticToolClient extends AIFunctionsProvider {
|
|||
static async fromIdentifier(
|
||||
projectOrDeploymentIdentifier: string,
|
||||
{
|
||||
apiKey = getEnv('AGENTIC_API_KEY'),
|
||||
agenticApiClient = new AgenticApiClient(),
|
||||
agenticGatewayBaseUrl = 'https://gateway.agentic.so',
|
||||
ky = defaultKy
|
||||
|
@ -141,6 +172,7 @@ export class AgenticToolClient extends AIFunctionsProvider {
|
|||
assert(deployment, `Deployment "${deploymentIdentifier}" not found`)
|
||||
|
||||
return new AgenticToolClient({
|
||||
apiKey,
|
||||
project,
|
||||
deployment,
|
||||
deploymentIdentifier,
|
||||
|
|
|
@ -1396,6 +1396,9 @@ importers:
|
|||
ky:
|
||||
specifier: 'catalog:'
|
||||
version: 1.8.1
|
||||
zod:
|
||||
specifier: 'catalog:'
|
||||
version: 3.25.67
|
||||
|
||||
packages/types:
|
||||
dependencies:
|
||||
|
|
7
todo.md
7
todo.md
|
@ -11,6 +11,9 @@
|
|||
- should we bypass stripe for `free` plans to increase conversions?
|
||||
- handle browser back/forward with `?next=`
|
||||
- add some social proof to signup page
|
||||
- example usage
|
||||
- fix mcp examples
|
||||
- add example usage to project detail pages
|
||||
- docs
|
||||
- main readme
|
||||
- sub readmes (https://www.npmjs.com/package/@agentic/cli)
|
||||
|
@ -23,7 +26,6 @@
|
|||
- finesse header (mobile)
|
||||
- create agentic products for legacy tools
|
||||
- add basic legal terms and privacy policy (and update links in stripe)
|
||||
- add caching to public projects api endpoints
|
||||
- add support for [`@google/genai`](https://github.com/googleapis/js-genai) tools adapter
|
||||
- add feature about optimized context to marketing site
|
||||
- ensure all agentic tool inputSchemas support openai strict mode by default
|
||||
|
@ -31,6 +33,9 @@
|
|||
- mcp tool inputSchemas may not support openai strict mode either
|
||||
- maybe default `strict` to `false` in `createAIFunction` for now?
|
||||
- also add `@agentic/json-schema` to `createJsonSchema` parsing instead of current no-op
|
||||
- add support to `@agentic/platform-tool-client` for
|
||||
- double check example usage for all TS sdks
|
||||
- add docs on using multiple tools with `AIFunctionSet`
|
||||
|
||||
## TODO: Post-MVP
|
||||
|
||||
|
|
Ładowanie…
Reference in New Issue