diff --git a/apps/api/src/api-v1/consumers/utils.ts b/apps/api/src/api-v1/consumers/utils.ts index ab509b66..7bc055ad 100644 --- a/apps/api/src/api-v1/consumers/utils.ts +++ b/apps/api/src/api-v1/consumers/utils.ts @@ -1,6 +1,7 @@ import type { RawConsumer } from '@/db' import type { AuthenticatedHonoContext } from '@/lib/types' import { setPublicCacheControl } from '@/lib/cache-control' +import { env } from '@/lib/env' export function setAdminCacheControlForConsumer( c: AuthenticatedHonoContext, @@ -11,8 +12,15 @@ export function setAdminCacheControlForConsumer( !consumer.activated || !consumer.isStripeSubscriptionActive ) { - setPublicCacheControl(c.res, '10s') + // TODO: should we cache free-tier consumers for longer on prod? + // We really don't want free tier customers to cause our backend API so + // much traffic, but we'd also like for customers upgrading to a paid tier + // to have a snappy, smooth experience – without having to wait for their + // free tier subscription to expire from the cache. + setPublicCacheControl(c.res, env.isProd ? '30s' : '10s') } else { - setPublicCacheControl(c.res, '1m') + // We don't want the gateway hitting our API too often, so cache active + // customer subscriptions for longer in production + setPublicCacheControl(c.res, env.isProd ? 
'30m' : '1m') } } diff --git a/apps/api/src/api-v1/deployments/admin-get-deployment-by-identifier.ts b/apps/api/src/api-v1/deployments/admin-get-deployment-by-identifier.ts index c8f50352..fe372cb0 100644 --- a/apps/api/src/api-v1/deployments/admin-get-deployment-by-identifier.ts +++ b/apps/api/src/api-v1/deployments/admin-get-deployment-by-identifier.ts @@ -5,7 +5,9 @@ import type { AuthenticatedHonoEnv } from '@/lib/types' import { schema } from '@/db' import { acl } from '@/lib/acl' import { aclAdmin } from '@/lib/acl-admin' +import { setPublicCacheControl } from '@/lib/cache-control' import { tryGetDeploymentByIdentifier } from '@/lib/deployments/try-get-deployment-by-identifier' +import { env } from '@/lib/env' import { openapiAuthenticatedSecuritySchemas, openapiErrorResponse404, @@ -66,6 +68,13 @@ export function registerV1AdminGetDeploymentByIdentifier( const hasPopulateProject = populate.includes('project') + if (env.isProd) { + // Published deployments are immutable, so cache them for longer in production + setPublicCacheControl(c.res, deployment.published ? 
'1h' : '5m') + } else { + setPublicCacheControl(c.res, '10s') + } + return c.json( parseZodSchema(schema.deploymentAdminSelectSchema, { ...deployment, diff --git a/apps/api/src/api-v1/deployments/get-public-deployment-by-identifier.ts b/apps/api/src/api-v1/deployments/get-public-deployment-by-identifier.ts index 0972318f..f757fb30 100644 --- a/apps/api/src/api-v1/deployments/get-public-deployment-by-identifier.ts +++ b/apps/api/src/api-v1/deployments/get-public-deployment-by-identifier.ts @@ -4,6 +4,7 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi' import { schema } from '@/db' import { aclPublicProject } from '@/lib/acl-public-project' +import { setPublicCacheControl } from '@/lib/cache-control' import { tryGetDeploymentByIdentifier } from '@/lib/deployments/try-get-deployment-by-identifier' import { openapiAuthenticatedSecuritySchemas, @@ -59,6 +60,13 @@ export function registerV1GetPublicDeploymentByIdentifier( ) aclPublicProject(deployment.project!) + if (deployment.published) { + // Note that published deployments should be immutable + setPublicCacheControl(c.res, '1m') + } else { + setPublicCacheControl(c.res, '10s') + } + return c.json(parseZodSchema(schema.deploymentSelectSchema, deployment)) }) } diff --git a/apps/api/src/api-v1/projects/get-public-project-by-identifier.ts b/apps/api/src/api-v1/projects/get-public-project-by-identifier.ts index bc5965d5..159e903c 100644 --- a/apps/api/src/api-v1/projects/get-public-project-by-identifier.ts +++ b/apps/api/src/api-v1/projects/get-public-project-by-identifier.ts @@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi' import { db, eq, schema } from '@/db' import { aclPublicProject } from '@/lib/acl-public-project' +import { setPublicCacheControl } from '@/lib/cache-control' +import { env } from '@/lib/env' import { openapiAuthenticatedSecuritySchemas, openapiErrorResponse404, @@ -50,7 +52,8 @@ export function registerV1GetPublicProjectByIdentifier( 
...Object.fromEntries(populate.map((field) => [field, true])) } }) - await aclPublicProject(project, projectIdentifier) + aclPublicProject(project, projectIdentifier) + setPublicCacheControl(c.res, env.isProd ? '1m' : '10s') return c.json(parseZodSchema(schema.projectSelectSchema, project)) }) diff --git a/apps/api/src/api-v1/projects/get-public-project.ts b/apps/api/src/api-v1/projects/get-public-project.ts index d42f56c8..bc71315f 100644 --- a/apps/api/src/api-v1/projects/get-public-project.ts +++ b/apps/api/src/api-v1/projects/get-public-project.ts @@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi' import { db, eq, schema } from '@/db' import { aclPublicProject } from '@/lib/acl-public-project' +import { setPublicCacheControl } from '@/lib/cache-control' +import { env } from '@/lib/env' import { openapiAuthenticatedSecuritySchemas, openapiErrorResponse404, @@ -50,6 +52,7 @@ export function registerV1GetPublicProject(app: OpenAPIHono) { } }) aclPublicProject(project, projectId) + setPublicCacheControl(c.res, env.isProd ? '1m' : '10s') return c.json(parseZodSchema(schema.projectSelectSchema, project)) }) diff --git a/apps/api/src/api-v1/projects/list-public-projects.ts b/apps/api/src/api-v1/projects/list-public-projects.ts index 91c23e18..6e54205d 100644 --- a/apps/api/src/api-v1/projects/list-public-projects.ts +++ b/apps/api/src/api-v1/projects/list-public-projects.ts @@ -1,8 +1,11 @@ +import { env } from '@/lib/env' + import type { DefaultHonoEnv } from '@agentic/platform-hono' import { parseZodSchema } from '@agentic/platform-core' import { createRoute, type OpenAPIHono, z } from '@hono/zod-openapi' import { and, db, eq, isNotNull, schema } from '@/db' +import { setPublicCacheControl } from '@/lib/cache-control' import { openapiAuthenticatedSecuritySchemas, openapiErrorResponses @@ -60,6 +63,7 @@ export function registerV1ListPublicProjects(app: OpenAPIHono) { offset, limit }) + setPublicCacheControl(c.res, env.isProd ? 
'1m' : '10s') return c.json(parseZodSchema(z.array(schema.projectSelectSchema), projects)) }) diff --git a/apps/api/src/api-v1/users/get-user.ts b/apps/api/src/api-v1/users/get-user.ts index 1c447868..1bb050f8 100644 --- a/apps/api/src/api-v1/users/get-user.ts +++ b/apps/api/src/api-v1/users/get-user.ts @@ -4,6 +4,8 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi' import type { AuthenticatedHonoEnv } from '@/lib/types' import { db, eq, schema } from '@/db' import { acl } from '@/lib/acl' +import { setPublicCacheControl } from '@/lib/cache-control' +import { env } from '@/lib/env' import { openapiAuthenticatedSecuritySchemas, openapiErrorResponse404, @@ -45,6 +47,7 @@ export function registerV1GetUser(app: OpenAPIHono) { where: eq(schema.users.id, userId) }) assert(user, 404, `User not found "${userId}"`) + setPublicCacheControl(c.res, env.isProd ? '30s' : '10s') return c.json(parseZodSchema(schema.userSelectSchema, user)) }) diff --git a/apps/api/src/lib/cache-control.ts b/apps/api/src/lib/cache-control.ts index d1f5c5fe..f3fdc26d 100644 --- a/apps/api/src/lib/cache-control.ts +++ b/apps/api/src/lib/cache-control.ts @@ -1,12 +1,25 @@ import { assert } from '@agentic/platform-core' -export type PublicCacheControlLevels = '1s' | '10s' | '1m' | '1h' | '1d' +export type PublicCacheControlLevels = + | '1s' + | '10s' + | '30s' + | '1m' + | '5m' + | '10m' + | '30m' + | '1h' + | '1d' const publicCacheControlLevelsMap: Record = { '1s': 'public, max-age=1, s-maxage=1 stale-while-revalidate=0', '10s': 'public, max-age=10, s-maxage=10 stale-while-revalidate=1', + '30s': 'public, max-age=30, s-maxage=30 stale-while-revalidate=5', '1m': 'public, max-age=60, s-maxage=60 stale-while-revalidate=10', - '1h': 'public, max-age=3600, s-maxage=3600, stale-while-revalidate=300', + '5m': 'public, max-age=300, s-maxage=300 stale-while-revalidate=60', + '10m': 'public, max-age=600, s-maxage=600 stale-while-revalidate=120', + '30m': 'public, max-age=1800, s-maxage=1800 
stale-while-revalidate=300', + '1h': 'public, max-age=3600, s-maxage=3600, stale-while-revalidate=500', '1d': 'public, max-age=86400, s-maxage=86400, stale-while-revalidate=3600' } diff --git a/apps/gateway/src/lib/record-tool-call-usage.ts b/apps/gateway/src/lib/record-tool-call-usage.ts index f14c645f..74eedbd3 100644 --- a/apps/gateway/src/lib/record-tool-call-usage.ts +++ b/apps/gateway/src/lib/record-tool-call-usage.ts @@ -180,7 +180,7 @@ export function recordToolCallUsage({ // If there's a consumer and it hasn't been activated yet, make sure it's // activated. This may be called multiple times if the consumer is cached, // but this method is intentionally idempotent, and we don't cache non- - // activated consumers for long, so shouldn't be a problem. + // activated consumers for long, so it shouldn't be a problem. waitUntil(client.adminActivateConsumer({ consumerId: consumer.id })) } diff --git a/docs/docs.json b/docs/docs.json index accdbf4e..165f76f1 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -45,6 +45,18 @@ { "group": "Getting Started", "pages": ["index", "marketplace/index"] + }, + { + "group": "TypeScript AI SDKs", + "pages": [ + "marketplace/ts-sdks/ai-sdk", + "marketplace/ts-sdks/openai-chat", + "marketplace/ts-sdks/openai-responses", + "marketplace/ts-sdks/langchain", + "marketplace/ts-sdks/llamaindex", + "marketplace/ts-sdks/genkit", + "marketplace/ts-sdks/mastra" + ] } ] }, diff --git a/docs/index.mdx b/docs/index.mdx index 3d2f03d7..ae406b71 100644 --- a/docs/index.mdx +++ b/docs/index.mdx @@ -7,7 +7,7 @@ description: Agentic is the App Store for LLM Tools. - For developers interested in consuming Agentic tools. + For developers interested in using Agentic tools in their apps. 
For developers interested in publishing their own MCP server or OpenAPI diff --git a/docs/marketplace/index.mdx b/docs/marketplace/index.mdx index 19501f05..ff524f18 100644 --- a/docs/marketplace/index.mdx +++ b/docs/marketplace/index.mdx @@ -1,8 +1,78 @@ --- title: Quick Start -description: A quick start on using tools from Agentic's MCP marketplace. +description: A quick start on how to use tools from Agentic's marketplace. --- -Docs for Agentic's MCP marketplace are a WIP. +## TypeScript AI SDKs -Visit the [publishing quick start](/publishing/quickstart) to get started in the meantime. + + + How to use Agentic tools with the Vercel AI SDK. + + + + How to use Agentic tools with LangChain's TS SDK. + + + + How to use Agentic tools with the OpenAI Chat Completions API. + + + + How to use Agentic tools with the OpenAI Responses API. + + + + How to use Agentic tools with Llamaindex's TS SDK. + + + + How to use Agentic tools with the Firebase Genkit SDK. + + + + How to use Agentic tools with the Mastra SDK. + + + +## MCP Clients + +_MCP client docs are coming soon..._ + +## Python AI SDKs + +_Python docs are coming soon..._ + +## Want to publish your own MCP tools? + +Agentic makes it extremely easy to publish and monetize your own MCP tools. Regardless of whether you're starting from scratch or already have an existing API, you'll be up and running with a production-ready MCP product in minutes. + +Visit the [publishing docs](/publishing) to get started. diff --git a/docs/marketplace/ts-sdks/ai-sdk.mdx b/docs/marketplace/ts-sdks/ai-sdk.mdx new file mode 100644 index 00000000..f4363964 --- /dev/null +++ b/docs/marketplace/ts-sdks/ai-sdk.mdx @@ -0,0 +1,111 @@ +--- +title: Vercel AI SDK +description: How to use Agentic tools with the Vercel AI SDK. 
+--- + +## Install + + +```bash npm +npm install ai @agentic/ai-sdk @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add ai @agentic/ai-sdk @agentic/platform-tool-client +``` + +```bash bun +bun add ai @agentic/ai-sdk @agentic/platform-tool-client +``` + +```bash yarn +yarn add ai @agentic/ai-sdk @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. + +```ts +import 'dotenv/config' + +import { createAISDKTools } from '@agentic/ai-sdk' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import { createOpenAI } from '@ai-sdk/openai' +import { generateText } from 'ai' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + const openai = createOpenAI({ compatibility: 'strict' }) + + const result = await generateText({ + model: openai('gpt-4o-mini'), + tools: createAISDKTools(searchTool), + toolChoice: 'required', + temperature: 0, + system: 'You are a helpful assistant. Be as concise as possible.', + prompt: 'What is the weather in San Francisco?' + }) + + console.log(JSON.stringify(result.toolResults[0], null, 2)) +} + +await main() +``` + + +This example also uses the [@ai-sdk/openai](https://ai-sdk.dev/providers/ai-sdk-providers/openai) provider, which adds OpenAI support to the Vercel AI SDK. + +_Note that OpenAI is not necessary to use Agentic; this is just an example._ + + +```bash npm +npm install @ai-sdk/openai dotenv +``` + +```bash pnpm +pnpm add @ai-sdk/openai dotenv +``` + +```bash bun +bun add @ai-sdk/openai dotenv +``` + +```bash yarn +yarn add @ai-sdk/openai dotenv +``` + + + + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/ai-sdk + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. 
Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. + + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/ai-sdk/bin/weather.ts +``` + +## Additional resources + +- [`@agentic/ai-sdk` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/ai-sdk/src/ai-sdk.ts) +- [Vercel AI SDK docs](https://ai-sdk.dev) diff --git a/docs/marketplace/ts-sdks/genkit.mdx b/docs/marketplace/ts-sdks/genkit.mdx new file mode 100644 index 00000000..cc07bc49 --- /dev/null +++ b/docs/marketplace/ts-sdks/genkit.mdx @@ -0,0 +1,112 @@ +--- +title: Firebase Genkit +description: How to use Agentic tools with the Firebase Genkit SDK. +--- + +## Install + + +```bash npm +npm install genkit @agentic/genkit @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add genkit @agentic/genkit @agentic/platform-tool-client +``` + +```bash bun +bun add genkit @agentic/genkit @agentic/platform-tool-client +``` + +```bash yarn +yarn add genkit @agentic/genkit @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. 
+ +```ts +import 'dotenv/config' + +import { createGenkitTools } from '@agentic/genkit' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import { genkit } from 'genkit' +import { gpt4oMini, openAI } from 'genkitx-openai' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + + const ai = genkit({ + plugins: [openAI()] + }) + + const result = await ai.generate({ + model: gpt4oMini, + tools: createGenkitTools(ai, searchTool), + system: 'You are a helpful assistant. Be as concise as possible.', + prompt: 'What is the weather in San Francisco?' + }) + + console.log(result) +} + +await main() +``` + + +This example also uses the [genkitx-openai](https://github.com/TheFireCo/genkit-plugins/tree/main/plugins/openai) package, which adds OpenAI support to Genkit. + +_Note that OpenAI is not necessary to use Agentic; this is just an example._ + + +```bash npm +npm install genkitx-openai dotenv +``` + +```bash pnpm +pnpm add genkitx-openai dotenv +``` + +```bash bun +bun add genkitx-openai dotenv +``` + +```bash yarn +yarn add genkitx-openai dotenv +``` + + + + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/genkit + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. 
+ + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/genkit/bin/weather.ts +``` + +## Additional resources + +- [`@agentic/genkit` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/genkit/src/genkit.ts) +- [Firebase Genkit docs](https://firebase.google.com/docs/genkit) diff --git a/docs/marketplace/ts-sdks/langchain.mdx b/docs/marketplace/ts-sdks/langchain.mdx new file mode 100644 index 00000000..4d2d0637 --- /dev/null +++ b/docs/marketplace/ts-sdks/langchain.mdx @@ -0,0 +1,124 @@ +--- +title: LangChain +description: How to use Agentic tools with the LangChain TS SDK. +--- + +## Install + + +```bash npm +npm install langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client +``` + +```bash bun +bun add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client +``` + +```bash yarn +yarn add langchain @langchain/core @langchain/agents @agentic/langchain @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. 
+ +```ts +import 'dotenv/config' + +import { createLangChainTools } from '@agentic/langchain' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import { ChatPromptTemplate } from '@langchain/core/prompts' +import { ChatOpenAI } from '@langchain/openai' +import { AgentExecutor, createToolCallingAgent } from 'langchain/agents' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + + const tools = createLangChainTools(searchTool) + const agent = createToolCallingAgent({ + llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }), + tools, + prompt: ChatPromptTemplate.fromMessages([ + ['system', 'You are a helpful assistant. Be as concise as possible.'], + ['placeholder', '{chat_history}'], + ['human', '{input}'], + ['placeholder', '{agent_scratchpad}'] + ]) + }) + + const agentExecutor = new AgentExecutor({ + agent, + tools + // verbose: true + }) + + const result = await agentExecutor.invoke({ + input: 'What is the weather in San Francisco?' + }) + + console.log(result.output) +} + +await main() +``` + + +This example also uses the [@langchain/openai](https://js.langchain.com/docs/integrations/platforms/openai) package, which adds OpenAI support to LangChain. + +_Note that OpenAI is not necessary to use Agentic; this is just an example._ + + +```bash npm +npm install @langchain/openai dotenv +``` + +```bash pnpm +pnpm add @langchain/openai dotenv +``` + +```bash bun +bun add @langchain/openai dotenv +``` + +```bash yarn +yarn add @langchain/openai dotenv +``` + + + + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/langchain + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. 
+ + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. + + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/langchain/bin/weather.ts +``` + +## Additional resources + +- [`@agentic/langchain` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/langchain/src/langchain.ts) +- [LangChain TS docs](https://js.langchain.com) diff --git a/docs/marketplace/ts-sdks/llamaindex.mdx b/docs/marketplace/ts-sdks/llamaindex.mdx new file mode 100644 index 00000000..a85bcb5b --- /dev/null +++ b/docs/marketplace/ts-sdks/llamaindex.mdx @@ -0,0 +1,112 @@ +--- +title: LlamaIndex +description: How to use Agentic tools with the LlamaIndex TS SDK. +--- + +## Install + + +```bash npm +npm install llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client +``` + +```bash bun +bun add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client +``` + +```bash yarn +yarn add llamaindex @llamaindex/workflow @agentic/llamaindex @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. 
+ +```ts +import 'dotenv/config' + +import { createLlamaIndexTools } from '@agentic/llamaindex' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import { openai } from '@llamaindex/openai' +import { agent } from '@llamaindex/workflow' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + + const tools = createLlamaIndexTools(searchTool) + const weatherAgent = agent({ + llm: openai({ model: 'gpt-4o-mini', temperature: 0 }), + systemPrompt: 'You are a helpful assistant. Be as concise as possible.', + tools + }) + + const response = await weatherAgent.run( + 'What is the weather in San Francisco?' + ) + + console.log(response.data.result) +} + +await main() +``` + + +This example also uses the [@llamaindex/openai](https://ts.llamaindex.ai/docs/llamaindex/modules/models/llms/openai) provider, which adds OpenAI support to LlamaIndex. + +_Note that OpenAI is not necessary to use Agentic; this is just an example._ + + +```bash npm +npm install @llamaindex/openai dotenv +``` + +```bash pnpm +pnpm add @llamaindex/openai dotenv +``` + +```bash bun +bun add @llamaindex/openai dotenv +``` + +```bash yarn +yarn add @llamaindex/openai dotenv +``` + + + + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/llamaindex + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. 
+ + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/llamaindex/bin/weather.ts +``` + +## Additional resources + +- [`@agentic/llamaindex` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/llamaindex/src/llamaindex.ts) +- [LlamaIndex TS docs](https://ts.llamaindex.ai) diff --git a/docs/marketplace/ts-sdks/mastra.mdx b/docs/marketplace/ts-sdks/mastra.mdx new file mode 100644 index 00000000..da44b7e3 --- /dev/null +++ b/docs/marketplace/ts-sdks/mastra.mdx @@ -0,0 +1,112 @@ +--- +title: Mastra +description: How to use Agentic tools with the Mastra AI Agent framework. +--- + +## Install + + +```bash npm +npm install @mastra/core @agentic/mastra @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add @mastra/core @agentic/mastra @agentic/platform-tool-client +``` + +```bash bun +bun add @mastra/core @agentic/mastra @agentic/platform-tool-client +``` + +```bash yarn +yarn add @mastra/core @agentic/mastra @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. + +```ts +import 'dotenv/config' + +import { createMastraTools } from '@agentic/mastra' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import { openai } from '@ai-sdk/openai' +import { Agent } from '@mastra/core/agent' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + + const weatherAgent = new Agent({ + name: 'Weather Agent', + instructions: 'You are a helpful assistant. Be as concise as possible.', + model: openai('gpt-4o-mini'), + tools: createMastraTools(searchTool) + }) + + const res = await weatherAgent.generate( + 'What is the weather in San Francisco?' 
+ ) + console.log(res.text) +} + +await main() +``` + + +This example also uses the [@ai-sdk/openai](https://ai-sdk.dev/providers/ai-sdk-providers/openai) provider, which adds OpenAI support to Mastra. + +_Note that OpenAI is not necessary to use Agentic; this is just an example._ + + +```bash npm +npm install @ai-sdk/openai dotenv +``` + +```bash pnpm +pnpm add @ai-sdk/openai dotenv +``` + +```bash bun +bun add @ai-sdk/openai dotenv +``` + +```bash yarn +yarn add @ai-sdk/openai dotenv +``` + + + + +## Running this example + +You can view the full source for this example here: +https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/mastra + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. + + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/mastra/bin/weather.ts +``` + +## Additional resources + +- [`@agentic/mastra` source](https://github.com/transitive-bullshit/agentic/blob/main/stdlib/mastra/src/mastra.ts) +- [Mastra docs](https://mastra.ai) diff --git a/docs/marketplace/ts-sdks/openai-chat.mdx b/docs/marketplace/ts-sdks/openai-chat.mdx new file mode 100644 index 00000000..f066290f --- /dev/null +++ b/docs/marketplace/ts-sdks/openai-chat.mdx @@ -0,0 +1,124 @@ +--- +title: OpenAI Chat Completions +description: How to use Agentic tools with the OpenAI Chat Completions API. +--- + + + There's no need for an adapter with the OpenAI SDK since all agentic tools are + compatible with OpenAI by default. 
+ + +## Install + + +```bash npm +npm install openai @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add openai @agentic/platform-tool-client +``` + +```bash bun +bun add openai @agentic/platform-tool-client +``` + +```bash yarn +yarn add openai @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. + +```ts +import 'dotenv/config' + +import { AgenticToolClient } from '@agentic/platform-tool-client' +import OpenAI from 'openai' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + const openai = new OpenAI() + + const messages: OpenAI.ChatCompletionMessageParam[] = [ + { + role: 'system', + content: 'You are a helpful assistant. Be as concise as possible.' + }, + { role: 'user', content: 'What is the weather in San Francisco?' } + ] + + { + // First call to OpenAI to invoke the tool + const res = await openai.chat.completions.create({ + messages, + model: 'gpt-4o-mini', + temperature: 0, + tools: searchTool.functions.toolSpecs, + tool_choice: 'required' + }) + + const message = res.choices[0]!.message! + const toolCall = message.tool_calls![0]!.function! 
+ const toolResult = await searchTool.callTool( + toolCall.name, + toolCall.arguments + ) + + messages.push(message) + messages.push({ + role: 'tool', + tool_call_id: message.tool_calls![0]!.id, + content: JSON.stringify(toolResult) + }) + } + + { + // Second call to OpenAI to generate a text response + const res = await openai.chat.completions.create({ + messages, + model: 'gpt-4o-mini', + temperature: 0, + tools: searchTool.functions.toolSpecs + }) + const message = res.choices?.[0]?.message + console.log(message?.content) + } +} + +await main() +``` + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/openai + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. + + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/openai/bin/weather.ts +``` + +## Additional resources + +- [OpenAI Chat Completions API docs](https://platform.openai.com/docs/api-reference/chat/create) +- [OpenAI Responses vs Chat Completions](https://platform.openai.com/docs/guides/responses-vs-chat-completions) +- [Using OpenAI's Responses API with Agentic](/marketplace/ts-sdks/openai-responses) diff --git a/docs/marketplace/ts-sdks/openai-responses.mdx b/docs/marketplace/ts-sdks/openai-responses.mdx new file mode 100644 index 00000000..87e448f3 --- /dev/null +++ b/docs/marketplace/ts-sdks/openai-responses.mdx @@ -0,0 +1,125 @@ +--- +title: OpenAI Responses +description: How to use Agentic tools with the OpenAI Responses API. 
+--- + + + There's no need for an adapter with the OpenAI SDK since all agentic tools are + compatible with OpenAI by default. + + +## Install + + +```bash npm +npm install openai @agentic/platform-tool-client +``` + +```bash pnpm +pnpm add openai @agentic/platform-tool-client +``` + +```bash bun +bun add openai @agentic/platform-tool-client +``` + +```bash yarn +yarn add openai @agentic/platform-tool-client +``` + + + +## Usage + +This example uses the [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) tool. + +```ts +import 'dotenv/config' + +import { assert } from '@agentic/core' +import { AgenticToolClient } from '@agentic/platform-tool-client' +import OpenAI from 'openai' + +async function main() { + const searchTool = await AgenticToolClient.fromIdentifier('@agentic/search') + const openai = new OpenAI() + + const messages: OpenAI.Responses.ResponseInput = [ + { + role: 'system', + content: 'You are a helpful assistant. Be as concise as possible.' + }, + { role: 'user', content: 'What is the weather in San Francisco?' 
} + ] + + { + // First call to OpenAI to invoke the tool + const res = await openai.responses.create({ + model: 'gpt-4o-mini', + temperature: 0, + tools: searchTool.functions.responsesToolSpecs, + tool_choice: 'required', + input: messages + }) + + const toolCall = res.output[0] + assert(toolCall?.type === 'function_call') + const toolResult = await searchTool.callTool( + toolCall.name, + toolCall.arguments + ) + + messages.push(toolCall) + messages.push({ + type: 'function_call_output', + call_id: toolCall.call_id, + output: JSON.stringify(toolResult) + }) + } + + { + // Second call to OpenAI to generate a text response + const res = await openai.responses.create({ + model: 'gpt-4o-mini', + temperature: 0, + tools: searchTool.functions.responsesToolSpecs, + input: messages + }) + + console.log(res.output_text) + } +} + +await main() +``` + +## Running this example + +You can view the full source for this example here: https://github.com/transitive-bullshit/agentic/tree/main/examples/ts-sdks/openai + + + You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) + to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. + + + + The + [`@agentic/search`](https://agentic.so/marketplace/projects/@agentic/search) + tool comes with a generous free tier, but once that runs out, you'll need to + sign up for a paid plan and add an `AGENTIC_API_KEY` to your `.env` file. 
+ + +```sh +git clone git@github.com:transitive-bullshit/agentic.git +cd agentic +pnpm install +pnpm build +echo 'OPENAI_API_KEY=your-key' >> .env +npx tsx examples/ts-sdks/openai/bin/weather-responses.ts +``` + +## Additional resources + +- [OpenAI Chat Completions API docs](https://platform.openai.com/docs/api-reference/chat/create) +- [OpenAI Responses vs Chat Completions](https://platform.openai.com/docs/guides/responses-vs-chat-completions) +- [Using OpenAI's Chat Completion API with Agentic](/marketplace/ts-sdks/openai-chat) diff --git a/docs/media/sdks/firebase.svg b/docs/media/sdks/firebase.svg new file mode 100644 index 00000000..ca735d95 --- /dev/null +++ b/docs/media/sdks/firebase.svg @@ -0,0 +1 @@ +Firebase \ No newline at end of file diff --git a/docs/media/sdks/langchain.svg b/docs/media/sdks/langchain.svg new file mode 100644 index 00000000..7fda56f8 --- /dev/null +++ b/docs/media/sdks/langchain.svg @@ -0,0 +1 @@ +LangChain \ No newline at end of file diff --git a/docs/media/sdks/llamaindex.svg b/docs/media/sdks/llamaindex.svg new file mode 100644 index 00000000..a4651c8f --- /dev/null +++ b/docs/media/sdks/llamaindex.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/docs/media/sdks/mastra.svg b/docs/media/sdks/mastra.svg new file mode 100644 index 00000000..7f5ed00c --- /dev/null +++ b/docs/media/sdks/mastra.svg @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/docs/media/sdks/openai.svg b/docs/media/sdks/openai.svg new file mode 100644 index 00000000..d7a1c343 --- /dev/null +++ b/docs/media/sdks/openai.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/media/sdks/vercel.svg b/docs/media/sdks/vercel.svg new file mode 100644 index 00000000..821ecfff --- /dev/null +++ b/docs/media/sdks/vercel.svg @@ -0,0 +1 @@ +Vercel \ No newline at end of file diff --git a/docs/publishing/config/index.mdx b/docs/publishing/config/index.mdx index eebb4f1a..48ba6b26 100644 --- a/docs/publishing/config/index.mdx +++ b/docs/publishing/config/index.mdx @@ -5,11 +5,11 @@ 
description: Configuring your Agentic project. Every Agentic project needs a config file (`agentic.config.ts`, `agentic.config.js`, or `agentic.config.json`) to define the project's metadata, pricing, rate-limits, and any tool-specific behavior overrides. - + Configuring your project can feel a little overwhelming. Feel free to [reach out to us](/contact) if you're considering using Agentic's MCP Gateway, and I'd be happy to help walk you through setting your product up for success. - + ## Config Fields @@ -162,8 +162,8 @@ See [Tool Config](/publishing/config/tool-config) for details. ## Config Help - + Configuring your project can feel a little overwhelming. Feel free to [reach out to us](/contact) if you're considering using Agentic's MCP Gateway, and I'd be happy to help walk you through setting your product up for success. - + diff --git a/docs/publishing/config/tool-config.mdx b/docs/publishing/config/tool-config.mdx index b7088f76..aa915f2d 100644 --- a/docs/publishing/config/tool-config.mdx +++ b/docs/publishing/config/tool-config.mdx @@ -144,9 +144,9 @@ See [Rate Limits](/publishing/config/rate-limits) for details. ## Config Help - + Configuring your project can feel a little overwhelming with the amount of options available. Feel free to [reach out to us](/contact) if you're considering using Agentic's MCP Gateway, and I'd be happy to help walk you through setting your product up for success. - + diff --git a/docs/publishing/guides/existing-mcp-server.mdx b/docs/publishing/guides/existing-mcp-server.mdx index f7c25c33..a747cfab 100644 --- a/docs/publishing/guides/existing-mcp-server.mdx +++ b/docs/publishing/guides/existing-mcp-server.mdx @@ -60,6 +60,8 @@ agentic signup -e -p -u URL and that the URL supports the Streamable HTTP transport. +Your agentic config either be an `agentic.config.ts` file or an `agentic.config.json` file. The advantage of using a `ts` file is that you get full autocomplete and type safety. 
+ @@ -136,8 +138,8 @@ Every time you make a change to your project, you can run `agentic deploy` which - The returned deployment will not have any information about the origin server, - because your origin server is considered hidden once deployed to Agentic's MCP + The returned deployment will not have any information about your origin + server, because the origin is considered hidden once deployed to Agentic's MCP gateway. @@ -323,7 +325,7 @@ curl -X POST -H "Content-Type: application/json" -d '{ "query": "example google ```bash -http -j https://gateway.agentic.com/mcp/search/search query='example google search' +http https://gateway.agentic.com/mcp/search/search query='example google search' ``` @@ -346,7 +348,7 @@ Now, your project will be available at `https://agentic.so/marketplace/projects/ You can share your product's public URL with customers, and they'll be able to subscribe to your product via Stripe. You can visit your [dashboard](https://agentic.so/app) to track customer usage and revenue. -Congrats, you now have a live MCP product! 🎉 +Congrats, you now have a live MCP product! 🎉 ## 7. (Optional) Submit your product to the public Agentic Marketplace diff --git a/docs/publishing/guides/existing-openapi-service.mdx b/docs/publishing/guides/existing-openapi-service.mdx index 55453b3c..6278c240 100644 --- a/docs/publishing/guides/existing-openapi-service.mdx +++ b/docs/publishing/guides/existing-openapi-service.mdx @@ -60,6 +60,8 @@ agentic signup -e -p -u `https` URL, and that your OpenAPI spec is a valid 3.0 or 3.1 spec. +Your agentic config either be an `agentic.config.ts` file or an `agentic.config.json` file. The advantage of using a `ts` file is that you get full autocomplete and type safety. + @@ -137,11 +139,17 @@ Every time you make a change to your project, you can run `agentic deploy` which prioritize this feature. 
+ + The returned deployment will not have any information about your origin + server, because the origin is considered hidden once deployed to Agentic's MCP + gateway. + + - The returned deployment will not have any information about the origin server, - because your origin server is considered hidden once deployed to Agentic's MCP + The returned deployment will not have any information about your origin + server, because the origin is considered hidden once deployed to Agentic's MCP gateway. @@ -325,7 +333,7 @@ curl -X POST -H "Content-Type: application/json" -d '{ "query": "example google ```bash -http -j https://gateway.agentic.com/mcp/search/search query='example google search' +http https://gateway.agentic.com/mcp/search/search query='example google search' ``` @@ -348,7 +356,7 @@ Now, your project will be available at `https://agentic.so/marketplace/projects/ You can share your product's public URL with customers, and they'll be able to subscribe to your product via Stripe. You can visit your [dashboard](https://agentic.so/app) to track customer usage and revenue. -Congrats, you now have a live MCP product! 🎉 +Congrats, you now have a live MCP product! 🎉 ## 7. (Optional) Submit your product to the public Agentic Marketplace diff --git a/docs/publishing/guides/ts-mcp-hono.mdx b/docs/publishing/guides/ts-mcp-hono.mdx index 84ef49a8..475310f0 100644 --- a/docs/publishing/guides/ts-mcp-hono.mdx +++ b/docs/publishing/guides/ts-mcp-hono.mdx @@ -80,11 +80,11 @@ app.all('/mcp', async (c) => { serve({ fetch: app.fetch, port: 8787 }) ``` - + Hono is really flexible, so if you'd rather deploy your server to Cloudflare Workers instead of using Node.js (or any other platform), just follow [Hono's docs](https://hono.dev/docs/getting-started/basic). - + ## 3. 
Deploy your MCP server remotely diff --git a/docs/publishing/guides/ts-openapi-hono.mdx b/docs/publishing/guides/ts-openapi-hono.mdx index cc655d91..9fdfb287 100644 --- a/docs/publishing/guides/ts-openapi-hono.mdx +++ b/docs/publishing/guides/ts-openapi-hono.mdx @@ -87,11 +87,11 @@ serve({ fetch: app.fetch, port: 8787 }) Note that the auto-generated OpenAPI spec will be available at `/docs` in this example. - + Hono is really flexible, so if you'd rather deploy your server to Cloudflare Workers instead of using Node.js (or any other platform), just follow [Hono's docs](https://hono.dev/docs/getting-started/basic). - + ## 3. Deploy your OpenAPI server remotely diff --git a/docs/publishing/origin/index.mdx b/docs/publishing/origin/index.mdx index 393bd7db..36b3f694 100644 --- a/docs/publishing/origin/index.mdx +++ b/docs/publishing/origin/index.mdx @@ -9,13 +9,13 @@ description: Configuring your origin MCP server or OpenAPI service with Agentic' interested in hosting your origin server with Agentic's infrastructure, please [reach out to us](/contact) and we'll be happy to help you get set up. - + Remote origin servers are important because they allow for maximum flexibility with how you author and host your MCP server or OpenAPI service. By cleanly separating between Agentic's MCP gateway and your remote origin server, Agentic supports origin servers written in any language or framework and deployed to any cloud. - + ### Remote Origin MCP Server diff --git a/legacy/docs/sdks/ai-sdk.mdx b/legacy/docs/sdks/ai-sdk.mdx deleted file mode 100644 index 3ef98dc8..00000000 --- a/legacy/docs/sdks/ai-sdk.mdx +++ /dev/null @@ -1,92 +0,0 @@ ---- -title: Vercel AI SDK -description: Agentic adapter for the Vercel AI SDK. 
---- - -- package: `@agentic/ai-sdk` -- exports: `function createAISDKTools` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/ai-sdk/src/ai-sdk.ts) -- [Vercel AI SDK docs](https://sdk.vercel.ai) - -## Install - - -```bash npm -npm install @agentic/ai-sdk ai -``` - -```bash yarn -yarn add @agentic/ai-sdk ai -``` - -```bash pnpm -pnpm add @agentic/ai-sdk ai -``` - - - -## Usage - -```ts -import 'dotenv/config' - -import { createAISDKTools } from '@agentic/ai-sdk' -import { WeatherClient } from '@agentic/weather' -import { openai } from '@ai-sdk/openai' -import { generateText } from 'ai' - -async function main() { - const weather = new WeatherClient() - - const result = await generateText({ - model: openai('gpt-4o-mini'), - tools: createAISDKTools(weather), - toolChoice: 'required', - temperature: 0, - system: 'You are a helpful assistant. Be as concise as possible.', - prompt: 'What is the weather in San Francisco?' - }) - - console.log(result.toolResults[0]) -} - -await main() -``` - -Note that this example snippet also requires you to install the AI SDK's OpenAI provider, the Agentic weather tool, and `dotenv`. - - -```bash npm -npm install @ai-sdk/openai @agentic/weather dotenv -``` - -```bash yarn -yarn add @ai-sdk/openai @agentic/weather dotenv -``` - -```bash pnpm -pnpm add @ai-sdk/openai @agentic/weather dotenv -``` - - - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. 
- - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/ai-sdk/bin/weather.ts -``` diff --git a/legacy/docs/sdks/dexter.mdx b/legacy/docs/sdks/dexter.mdx deleted file mode 100644 index a9fbff1e..00000000 --- a/legacy/docs/sdks/dexter.mdx +++ /dev/null @@ -1,75 +0,0 @@ ---- -title: Dexter -description: Agentic adapter for the Dexa Dexter SDK. ---- - -- package: `@agentic/dexter` -- exports: `function createDexterFunctions` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/dexter/src/dexter.ts) -- [Dexa Dexter SDK docs](https://dexter.dexa.ai) - -## Install - - -```bash npm -npm install @agentic/dexter @dexaai/dexter -``` - -```bash yarn -yarn add @agentic/dexter @dexaai/dexter -``` - -```bash pnpm -pnpm add @agentic/dexter @dexaai/dexter -``` - - - -## Usage - -```ts -import 'dotenv/config' - -import { createDexterFunctions } from '@agentic/dexter' -import { WeatherClient } from '@agentic/weather' -import { ChatModel, createAIRunner } from '@dexaai/dexter' - -async function main() { - const weather = new WeatherClient() - - const runner = createAIRunner({ - chatModel: new ChatModel({ - params: { model: 'gpt-4o-mini', temperature: 0 } - // debug: true - }), - functions: createDexterFunctions(weather), - systemMessage: 'You are a helpful assistant. Be as concise as possible.' - }) - - const result = await runner('What is the weather in San Francisco?') - console.log(result) -} - -await main() -``` - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. 
- - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/dexter/bin/weather.ts -``` diff --git a/legacy/docs/sdks/genkit.mdx b/legacy/docs/sdks/genkit.mdx deleted file mode 100644 index d715d258..00000000 --- a/legacy/docs/sdks/genkit.mdx +++ /dev/null @@ -1,79 +0,0 @@ ---- -title: Genkit -description: Agentic adapter for the Firebase Genkit SDK. ---- - -- package: `@agentic/genkit` -- exports: `function createGenkitTools` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/genkit/src/genkit.ts) -- [Firebase Genkit docs](https://firebase.google.com/docs/genkit) - -## Install - - -```bash npm -npm install @agentic/genkit genkit -``` - -```bash yarn -yarn add @agentic/genkit genkit -``` - -```bash pnpm -pnpm add @agentic/genkit genkit -``` - - - -## Usage - -This example also requires you to install the [genkitx-openai](https://github.com/TheFireCo/genkit-plugins/tree/main/plugins/openai) package, which adds support for OpenAI to Genkit. - -```ts -import 'dotenv/config' - -import { createGenkitTools } from '@agentic/genkit' -import { WeatherClient } from '@agentic/stdlib' -import { genkit } from 'genkit' -import { gpt4oMini, openAI } from 'genkitx-openai' - -async function main() { - const weather = new WeatherClient() - - const ai = genkit({ - plugins: [openAI()] - }) - - const result = await ai.generate({ - model: gpt4oMini, - tools: createGenkitTools(ai, weather), - system: 'You are a helpful assistant. Be as concise as possible.', - prompt: 'What is the weather in San Francisco?' - }) - - console.log(result) -} - -await main() -``` - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. 
- - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. - - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/genkit/bin/weather.ts -``` diff --git a/legacy/docs/sdks/langchain.mdx b/legacy/docs/sdks/langchain.mdx deleted file mode 100644 index 5f1a4c7d..00000000 --- a/legacy/docs/sdks/langchain.mdx +++ /dev/null @@ -1,87 +0,0 @@ ---- -title: LangChain -description: Agentic adapter for the LangChain JS SDK. ---- - -- package: `@agentic/langchain` -- exports: `function createLangChainTools` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/langchain/src/langchain.ts) -- [LangChain JS docs](https://js.langchain.com) - -## Install - - -```bash npm -npm install @agentic/langchain @langchain/core langchain -``` - -```bash yarn -yarn add @agentic/langchain @langchain/core langchain -``` - -```bash pnpm -pnpm add @agentic/langchain @langchain/core langchain -``` - - - -## Usage - -```ts -import { createLangChainTools } from '@agentic/langchain' -import { WeatherClient } from '@agentic/stdlib' -import { ChatPromptTemplate } from '@langchain/core/prompts' -import { ChatOpenAI } from '@langchain/openai' -import { AgentExecutor, createToolCallingAgent } from 'langchain/agents' - -async function main() { - const weather = new WeatherClient() - - const tools = createLangChainTools(weather) - const agent = createToolCallingAgent({ - llm: new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0 }), - tools, - prompt: ChatPromptTemplate.fromMessages([ - ['system', 'You are a helpful assistant. 
Be as concise as possible.'], - ['placeholder', '{chat_history}'], - ['human', '{input}'], - ['placeholder', '{agent_scratchpad}'] - ]) - }) - - const agentExecutor = new AgentExecutor({ - agent, - tools - // verbose: true - }) - - const result = await agentExecutor.invoke({ - input: 'What is the weather in San Francisco?' - }) - - console.log(result.output) -} - -await main() -``` - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. - - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/langchain/bin/weather.ts -``` diff --git a/legacy/docs/sdks/llamaindex.mdx b/legacy/docs/sdks/llamaindex.mdx deleted file mode 100644 index f0042afb..00000000 --- a/legacy/docs/sdks/llamaindex.mdx +++ /dev/null @@ -1,78 +0,0 @@ ---- -title: LlamaIndex -description: Agentic adapter for the LlamaIndex TS SDK. 
---- - -- package: `@agentic/llamaindex` -- exports: `function createLlamaIndexTools` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/llamaindex/src/llamaindex.ts) -- [LlamaIndex TS docs](https://ts.llamaindex.ai) - -## Install - - -```bash npm -npm install @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow -``` - -```bash yarn -yarn add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow -``` - -```bash pnpm -pnpm add @agentic/llamaindex llamaindex @llamaindex/openai @llamaindex/workflow -``` - - - -## Usage - -```ts -import 'dotenv/config' - -import { createLlamaIndexTools } from '@agentic/llamaindex' -import { WeatherClient } from '@agentic/stdlib' -import { openai } from '@llamaindex/openai' -import { agent } from '@llamaindex/workflow' - -async function main() { - const weather = new WeatherClient() - - const tools = createLlamaIndexTools(weather) - const weatherAgent = agent({ - name: 'Weather Agent', - llm: openai({ model: 'gpt-4o-mini', temperature: 0 }), - systemPrompt: 'You are a helpful assistant. Be as concise as possible.', - tools - }) - - const response = await weatherAgent.run( - 'What is the weather in San Francisco?' - ) - - console.log(response.data.result) -} - -await main() -``` - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. 
- - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/llamaindex/bin/weather.ts -``` diff --git a/legacy/docs/sdks/mastra.mdx b/legacy/docs/sdks/mastra.mdx deleted file mode 100644 index fe8c4399..00000000 --- a/legacy/docs/sdks/mastra.mdx +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: Mastra -description: Agentic adapter for the Mastra AI Agent framework. ---- - -- package: `@agentic/mastra` -- exports: `function createMastraTools` -- [source](https://github.com/transitive-bullshit/agentic/blob/main/packages/mastra/src/mastra.ts) -- [Mastra docs](https://mastra.ai/docs) - -## Install - - -```bash npm -npm install @agentic/mastra @mastra/core -``` - -```bash yarn -yarn add @agentic/mastra @mastra/core -``` - -```bash pnpm -pnpm add @agentic/mastra @mastra/core -``` - - - -## Usage - -```ts -import 'dotenv/config' - -import { createMastraTools } from '@agentic/mastra' -import { WeatherClient } from '@agentic/weather' -import { openai } from '@ai-sdk/openai' -import { Agent } from '@mastra/core/agent' - -async function main() { - const weather = new WeatherClient() - - const weatherAgent = new Agent({ - name: 'Weather Agent', - instructions: 'You are a helpful assistant. Be as concise as possible.', - model: openai('gpt-4o-mini'), - tools: createMastraTools(weather) - }) - - const res = await weatherAgent.generate( - 'What is the weather in San Francisco?' - ) - console.log(res.text) -} - -await main() -``` - -Note that this example snippet also requires you to install the AI SDK's OpenAI provider, the Agentic weather tool, and `dotenv`. 
- - -```bash npm -npm install @ai-sdk/openai @agentic/weather dotenv -``` - -```bash yarn -yarn add @ai-sdk/openai @agentic/weather dotenv -``` - -```bash pnpm -pnpm add @ai-sdk/openai @agentic/weather dotenv -``` - - - -## Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. - - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/mastra/bin/weather.ts -``` diff --git a/legacy/docs/sdks/openai.mdx b/legacy/docs/sdks/openai.mdx deleted file mode 100644 index ab3451b8..00000000 --- a/legacy/docs/sdks/openai.mdx +++ /dev/null @@ -1,156 +0,0 @@ ---- -title: OpenAI -description: How to use Agentic with the OpenAI TS SDK directly. ---- - - - There's no need for an adapter with the OpenAI SDK since all agentic tools are - compatible with OpenAI by default. You can use `AIFunctionSet.specs` for - function calling or `AIFunctionSet.toolSpecs` for parallel tool calling. - - -## Install - - -```bash npm -npm install @agentic/stdlib openai -``` - -```bash yarn -yarn add @agentic/stdlib openai -``` - -```bash pnpm -pnpm add @agentic/stdlib openai -``` - - - -## Usage - -```ts -import { WeatherClient } from '@agentic/stdlib' -import OpenAI from 'openai' - -const weather = new WeatherClient() -const openai = new OpenAI() - -const messages: OpenAI.ChatCompletionMessageParam[] = [ - { - role: 'system', - content: 'You are a helpful assistant. Be as concise as possible.' - }, - { role: 'user', content: 'What is the weather in San Francisco?' 
} -] - -const res = await openai.chat.completions.create({ - messages, - model: 'gpt-4o-mini', - temperature: 0, - tools: weather.functions.toolSpecs, - tool_choice: 'required' -}) -const message = res.choices[0]?.message! -console.log(JSON.stringify(message, null, 2)) -assert(message.tool_calls?.[0]?.function?.name === 'get_current_weather') - -const fn = weather.functions.get('get_current_weather')! - -const toolArgs = message.tool_calls[0].function.arguments -const toolResult = await fn(toolArgs) -console.log(JSON.stringify(toolResult, null, 2)) -``` - -### Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. - - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/openai/bin/weather.ts -``` - -## OpenAI Responses API - -Agentic also supports the [OpenAI Responses API](https://platform.openai.com/docs/api-reference/responses) by referencing `AIFunctionSet.responsesToolSpecs` as in this example: - -```ts -import 'dotenv/config' - -import { WeatherClient } from '@agentic/stdlib' -import OpenAI from 'openai' - -async function main() { - const weather = new WeatherClient() - const openai = new OpenAI() - - const messages: OpenAI.Responses.ResponseInput = [ - { - role: 'system', - content: 'You are a helpful assistant. Be as concise as possible.' - }, - { role: 'user', content: 'What is the weather in San Francisco?' 
} - ] - - { - // Call to OpenAI to invoke the weather tool - const res = await openai.responses.create({ - model: 'gpt-4o-mini', - temperature: 0, - tools: weather.functions.responsesToolSpecs, - tool_choice: 'required', - input: messages - }) - - const message = res.output[0] - console.log(JSON.stringify(message, null, 2)) - assert(message?.type === 'function_call') - assert(message.name === 'get_current_weather') - - const fn = weather.functions.get('get_current_weather')! - const toolResult = await fn(message.arguments) - - messages.push(message) - messages.push({ - type: 'function_call_output', - call_id: message.call_id, - output: JSON.stringify(toolResult) - }) - } -} - -await main() -``` - -### Running this example - - - You'll need a free API key from [weatherapi.com](https://www.weatherapi.com) - to run this example. Store it in a local `.env` file as `WEATHER_API_KEY`. - - - - You'll need an [OpenAI API key](https://platform.openai.com/docs/quickstart) - to run this example. Store it in a local `.env` file as `OPENAI_API_KEY`. 
- - -```sh -git clone git@github.com:transitive-bullshit/agentic.git -cd agentic -pnpm install -echo 'WEATHER_API_KEY=your-key' >> .env -echo 'OPENAI_API_KEY=your-key' >> .env -npx tsx examples/openai/bin/weather-responses.ts -``` diff --git a/packages/platform/src/origin-adapters/mcp.ts b/packages/platform/src/origin-adapters/mcp.ts index ddd7c595..4d52b80a 100644 --- a/packages/platform/src/origin-adapters/mcp.ts +++ b/packages/platform/src/origin-adapters/mcp.ts @@ -28,7 +28,14 @@ export async function resolveMCPOriginAdapter({ ) const transport = new StreamableHTTPClientTransport(new URL(origin.url)) const client = new McpClient({ name, version }) - await client.connect(transport) + try { + await client.connect(transport) + } catch (err: any) { + throw new Error( + `Failed to connect to MCP server at ${origin.url} using the Streamable HTTP transport.Make sure your MCP server is running and accessible, and that your URL is using the correct path (/, /mcp, etc): ${err.message}`, + { cause: err } + ) + } const serverInfo = { name, diff --git a/packages/tool-client/package.json b/packages/tool-client/package.json index d3671f47..a9182263 100644 --- a/packages/tool-client/package.json +++ b/packages/tool-client/package.json @@ -31,7 +31,8 @@ "@agentic/platform-core": "workspace:*", "@agentic/platform-types": "workspace:*", "@agentic/platform-validators": "workspace:*", - "ky": "catalog:" + "ky": "catalog:", + "zod": "catalog:" }, "publishConfig": { "access": "public", diff --git a/packages/tool-client/src/agentic-tool-client.ts b/packages/tool-client/src/agentic-tool-client.ts index 5d5fb449..3536a16e 100644 --- a/packages/tool-client/src/agentic-tool-client.ts +++ b/packages/tool-client/src/agentic-tool-client.ts @@ -3,7 +3,8 @@ import { AIFunctionSet, AIFunctionsProvider, createAIFunction, - createJsonSchema + createJsonSchema, + getEnv } from '@agentic/core' import { AgenticApiClient } from '@agentic/platform-api-client' import { assert } from 
'@agentic/platform-core' @@ -11,13 +12,36 @@ import { parseDeploymentIdentifier } from '@agentic/platform-validators' import defaultKy, { type KyInstance } from 'ky' export type AgenticToolClientOptions = { + /** + * Optional API key for your subscription to the Agentic project. + * + * If not set, will default to the `AGENTIC_API_KEY` environment variable. + * + * If no `apiKey` is set, the client will make unauthenticated tool calls, + * which may or may not be supported by the target Agentic project. + */ + apiKey?: string + + /** + * Optional custom Agentic API client. + */ agenticApiClient?: AgenticApiClient + + /** + * Optional custom Agentic Gateway base URL. + * + * @default `https://gateway.agentic.so` + */ agenticGatewayBaseUrl?: string + + /** + * Optional custom Ky instance. + * + * Useful for overriding the default headers, retry logic, etc. + */ ky?: KyInstance } -// TODO: add support for optional apiKey - /** * Agentic tool client which makes it easy to use an Agentic tool products with * all of the major TypeScript LLM SDKs, without having to go through any MCP @@ -32,18 +56,21 @@ export type AgenticToolClientOptions = { * ``` */ export class AgenticToolClient extends AIFunctionsProvider { + readonly apiKey: string | undefined readonly project: Project readonly deployment: Deployment readonly agenticGatewayBaseUrl: string readonly ky: KyInstance protected constructor({ + apiKey, project, deployment, deploymentIdentifier, agenticGatewayBaseUrl, ky }: { + apiKey: string | undefined project: Project deployment: Deployment deploymentIdentifier: string @@ -52,10 +79,13 @@ export class AgenticToolClient extends AIFunctionsProvider { }) { super() + this.apiKey = apiKey this.project = project this.deployment = deployment this.agenticGatewayBaseUrl = agenticGatewayBaseUrl - this.ky = ky + this.ky = apiKey + ? 
ky.extend({ headers: { Authorization: `Bearer ${apiKey}` } }) + : ky this._functions = new AIFunctionSet( deployment.tools.map((tool) => { @@ -106,6 +136,7 @@ export class AgenticToolClient extends AIFunctionsProvider { static async fromIdentifier( projectOrDeploymentIdentifier: string, { + apiKey = getEnv('AGENTIC_API_KEY'), agenticApiClient = new AgenticApiClient(), agenticGatewayBaseUrl = 'https://gateway.agentic.so', ky = defaultKy @@ -141,6 +172,7 @@ export class AgenticToolClient extends AIFunctionsProvider { assert(deployment, `Deployment "${deploymentIdentifier}" not found`) return new AgenticToolClient({ + apiKey, project, deployment, deploymentIdentifier, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b2f3dec4..20bc5ad8 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1396,6 +1396,9 @@ importers: ky: specifier: 'catalog:' version: 1.8.1 + zod: + specifier: 'catalog:' + version: 3.25.67 packages/types: dependencies: diff --git a/todo.md b/todo.md index 0fe02574..f76257f3 100644 --- a/todo.md +++ b/todo.md @@ -11,6 +11,9 @@ - should we bypass stripe for `free` plans to increase conversions? - handle browser back/forward with `?next=` - add some social proof to signup page + - example usage + - fix mcp examples + - add example usage to project detail pages - docs - main readme - sub readmes (https://www.npmjs.com/package/@agentic/cli) @@ -23,7 +26,6 @@ - finesse header (mobile) - create agentic products for legacy tools - add basic legal terms and privacy policy (and update links in stripe) -- add caching to public projects api endpoints - add support for [`@google/genai`](https://github.com/googleapis/js-genai) tools adapter - add feature about optimized context to marketing site - ensure all agentic tool inputSchemas support openai strict mode by default @@ -31,6 +33,9 @@ - mcp tool inputSchemas may not support openai strict mode either - maybe default `strict` to `false` in `createAIFunction` for now? 
- also add `@agentic/json-schema` to `createJsonSchema` parsing instead of current no-op +- add support to `@agentic/platform-tool-client` for +- double check example usage for all TS sdks +- add docs on using multiple tools with `AIFunctionSet` ## TODO: Post-MVP