Mirror of https://github.com/transitive-bullshit/chatgpt-api

feat: update deps; remove unused core exports

parent e71cb1bdb0
commit 3419b05d0a
@@ -23,7 +23,7 @@ jobs:
       - name: Install pnpm
         uses: pnpm/action-setup@v4
         with:
-          version: 9.12.2
+          version: 10.4.1
           run_install: false

       - name: Install Node.js
@@ -12,15 +12,15 @@ description: Agentic adapter for the Firebase Genkit SDK.
 <CodeGroup>
 ```bash npm
-npm install @agentic/genkit @genkit-ai/ai @genkit-ai/core
+npm install @agentic/genkit genkit
 ```

 ```bash yarn
-yarn add @agentic/genkit @genkit-ai/ai @genkit-ai/core
+yarn add @agentic/genkit genkit
 ```

 ```bash pnpm
-pnpm add @agentic/genkit @genkit-ai/ai @genkit-ai/core
+pnpm add @agentic/genkit genkit
 ```

 </CodeGroup>
@@ -34,30 +34,20 @@ import 'dotenv/config'
 import { createGenkitTools } from '@agentic/genkit'
 import { WeatherClient } from '@agentic/stdlib'
-import { generate } from '@genkit-ai/ai'
-import { configureGenkit } from '@genkit-ai/core'
+import { genkit } from 'genkit'
 import { gpt4oMini, openAI } from 'genkitx-openai'

 async function main() {
   const weather = new WeatherClient()

-  configureGenkit({
+  const ai = genkit({
     plugins: [openAI()]
   })

-  const result = await generate({
+  const result = await ai.generate({
     model: gpt4oMini,
-    tools: createGenkitTools(weather),
-    history: [
-      {
-        role: 'system',
-        content: [
-          {
-            text: 'You are a helpful assistant. Be as concise as possible.'
-          }
-        ]
-      }
-    ],
+    tools: createGenkitTools(ai, weather),
+    system: 'You are a helpful assistant. Be as concise as possible.',
     prompt: 'What is the weather in San Francisco?'
   })
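For reference, here is the updated Genkit example assembled from the added lines above. This is a sketch: the trailing `console.log(result)` / `await main()` lines and the `.env`-based `OPENAI_API_KEY` setup are assumed to match the repo's other examples rather than taken from this diff.

```ts
import 'dotenv/config'

import { createGenkitTools } from '@agentic/genkit'
import { WeatherClient } from '@agentic/stdlib'
import { genkit } from 'genkit'
import { gpt4oMini, openAI } from 'genkitx-openai'

async function main() {
  const weather = new WeatherClient()

  // Genkit v1 replaces configureGenkit() with a per-app genkit() instance.
  const ai = genkit({
    plugins: [openAI()]
  })

  const result = await ai.generate({
    model: gpt4oMini,
    // The Genkit instance is now passed to createGenkitTools as well.
    tools: createGenkitTools(ai, weather),
    system: 'You are a helpful assistant. Be as concise as possible.',
    prompt: 'What is the weather in San Francisco?'
  })

  // Assumed wrap-up, mirroring the repo's other examples.
  console.log(result)
}

await main()
```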
@@ -1,36 +0,0 @@
-import 'dotenv/config'
-
-import { createAIChain, Msg } from '@agentic/core'
-import { PerigonClient } from '@agentic/perigon'
-import { SerperClient } from '@agentic/serper'
-import { ChatModel } from '@dexaai/dexter'
-
-async function main() {
-  const perigon = new PerigonClient()
-  const serper = new SerperClient()
-
-  const chatModel = new ChatModel({
-    params: { model: 'gpt-4o-mini', temperature: 0 },
-    debug: true
-  })
-
-  const chain = createAIChain({
-    name: 'search_news',
-    chatFn: chatModel.run.bind(chatModel),
-    tools: [perigon.functions.pick('search_news_stories'), serper],
-    params: {
-      messages: [
-        Msg.system(
-          'You are a helpful assistant. Be as concise as possible. Respond in markdown. Always cite your sources.'
-        )
-      ]
-    }
-  })
-
-  const result = await chain(
-    'Summarize the latest news stories about the upcoming US election.'
-  )
-  console.log(result)
-}
-
-await main()
@@ -1,35 +0,0 @@
-import 'dotenv/config'
-
-import { extractObject, Msg } from '@agentic/core'
-import { ChatModel } from '@dexaai/dexter'
-import { z } from 'zod'
-
-async function main() {
-  const chatModel = new ChatModel({
-    params: { model: 'gpt-4o-mini', temperature: 0 },
-    debug: true
-  })
-
-  const result = await extractObject({
-    name: 'extract-user',
-    chatFn: chatModel.run.bind(chatModel),
-    params: {
-      messages: [
-        Msg.system('Extract a JSON user object from the given text.'),
-        Msg.user(
-          'Bob Vance is 42 years old and lives in Brooklyn, NY. He is a software engineer.'
-        )
-      ]
-    },
-    schema: z.object({
-      name: z.string(),
-      age: z.number(),
-      location: z.string().optional()
-    }),
-    strict: true
-  })
-
-  console.log(result)
-}
-
-await main()
@@ -2,30 +2,20 @@ import 'dotenv/config'
 import { createGenkitTools } from '@agentic/genkit'
 import { WeatherClient } from '@agentic/stdlib'
-import { generate } from '@genkit-ai/ai'
-import { configureGenkit } from '@genkit-ai/core'
-import { gpt4o, openAI } from 'genkitx-openai'
+import { genkit } from 'genkit'
+import { gpt4oMini, openAI } from 'genkitx-openai'

 async function main() {
   const weather = new WeatherClient()

-  configureGenkit({
+  const ai = genkit({
     plugins: [openAI()]
   })

-  const result = await generate({
-    model: gpt4o,
-    tools: createGenkitTools(weather),
-    history: [
-      {
-        role: 'system',
-        content: [
-          {
-            text: 'You are a helpful assistant. Be as concise as possible.'
-          }
-        ]
-      }
-    ],
+  const result = await ai.generate({
+    model: gpt4oMini,
+    tools: createGenkitTools(ai, weather),
+    system: 'You are a helpful assistant. Be as concise as possible.',
     prompt: 'What is the weather in San Francisco?'
   })
@@ -10,9 +10,8 @@
   "dependencies": {
     "@agentic/genkit": "workspace:*",
     "@agentic/stdlib": "workspace:*",
-    "@genkit-ai/ai": "^0.5.16",
-    "@genkit-ai/core": "^0.5.16",
-    "genkitx-openai": "^0.10.0",
+    "genkit": "^1.0.4",
+    "genkitx-openai": "^0.16.0",
     "zod": "^3.23.8"
   },
   "devDependencies": {
package.json (26 lines changed)
@@ -7,7 +7,7 @@
     "type": "git",
     "url": "git+https://github.com/transitive-bullshit/agentic.git"
   },
-  "packageManager": "pnpm@9.12.2",
+  "packageManager": "pnpm@10.4.1",
   "engines": {
     "node": ">=18"
   },
@@ -33,24 +33,24 @@
     "prepare": "husky"
   },
   "devDependencies": {
-    "@changesets/cli": "^2.27.10",
+    "@changesets/cli": "^2.28.0",
     "@fisch0920/eslint-config": "^1.4.0",
     "@total-typescript/ts-reset": "^0.6.1",
-    "@types/node": "^22.9.1",
+    "@types/node": "^22.13.4",
     "del-cli": "^6.0.0",
-    "dotenv": "^16.4.5",
+    "dotenv": "^16.4.7",
     "eslint": "^8.57.1",
     "husky": "^9.1.7",
-    "lint-staged": "^15.2.10",
-    "npm-run-all2": "^7.0.0",
+    "lint-staged": "^15.4.3",
+    "npm-run-all2": "^7.0.2",
     "only-allow": "^1.2.1",
-    "prettier": "^3.3.3",
-    "tsup": "^8.3.0",
-    "tsx": "^4.19.1",
-    "turbo": "^2.3.0",
-    "typescript": "^5.6.3",
-    "vitest": "2.1.5",
-    "zod": "^3.23.8"
+    "prettier": "^3.5.1",
+    "tsup": "^8.3.6",
+    "tsx": "^4.19.3",
+    "turbo": "^2.4.2",
+    "typescript": "^5.7.3",
+    "vitest": "3.0.6",
+    "zod": "^3.24.2"
   },
   "lint-staged": {
     "*.{ts,tsx}": [
@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -35,11 +35,11 @@
     "dedent": "^1.5.3",
     "delay": "^6.0.0",
     "jsonrepair": "^3.9.0",
-    "ky": "^1.7.2",
+    "ky": "^1.7.5",
     "openai-zod-to-json-schema": "^1.0.3",
     "p-map": "^7.0.2",
-    "p-throttle": "^6.2.0",
-    "type-fest": "^4.26.1",
+    "p-throttle": "^7.0.0",
+    "type-fest": "^4.35.0",
     "zod-validation-error": "^3.4.0"
   },
   "peerDependencies": {
@@ -1,234 +0,0 @@
-import type { SetOptional } from 'type-fest'
-import type { z } from 'zod'
-import pMap from 'p-map'
-
-import type * as types from './types'
-import { AIFunctionSet } from './ai-function-set'
-import { AbortError } from './errors'
-import { Msg } from './message'
-import { asSchema, augmentSystemMessageWithJsonSchema } from './schema'
-import { getErrorMessage } from './utils'
-
-export type AIChainParams<Result extends types.AIChainResult = string> = {
-  /** Name of the chain */
-  name: string
-
-  /** Chat completions function */
-  chatFn: types.ChatFn
-
-  /** Description of the chain */
-  description?: string
-
-  /** Optional chat completion params */
-  params?: types.Simplify<
-    Partial<Omit<types.ChatParams, 'tools' | 'functions'>>
-  >
-
-  /** Optional tools */
-  tools?: types.AIFunctionLike[]
-
-  /** Optional response schema */
-  schema?: z.ZodType<Result> | types.Schema<Result>
-
-  /**
-   * Whether or not the response schema should use OpenAI's structured output
-   * generation.
-   */
-  strict?: boolean
-
-  /** Max number of LLM calls to allow */
-  maxCalls?: number
-
-  /** Max number of retries to allow */
-  maxRetries?: number
-
-  /** Max concurrency when invoking tool calls */
-  toolCallConcurrency?: number
-
-  /** Whether or not to inject the schema into the context */
-  injectSchemaIntoSystemMessage?: boolean
-}
-
-/**
- * Creates a chain of chat completion calls that can be invoked as a single
- * function. It is meant to simplify the process of resolving tool calls
- * and optionally adding validation to the final result.
- *
- * The returned function will invoke the `chatFn` up to `maxCalls` times,
- * resolving any tool calls to the included `functions` and retrying if
- * necessary up to `maxRetries`.
- *
- * The chain ends when a non-tool call is returned, and the final result can
- * optionally be validated against a Zod schema, which defaults to a `string`.
- *
- * To prevent possible infinite loops, the chain will throw an error if it
- * exceeds `maxCalls` (`maxCalls` is expected to be >= `maxRetries`).
- */
-export function createAIChain<Result extends types.AIChainResult = string>({
-  name,
-  description,
-  chatFn,
-  params,
-  schema: rawSchema,
-  tools,
-  maxCalls = 5,
-  maxRetries = 2,
-  toolCallConcurrency = 8,
-  injectSchemaIntoSystemMessage = false,
-  strict = false
-}: AIChainParams<Result>): types.AIChain<Result> {
-  const functionSet = new AIFunctionSet(tools)
-  const schema = rawSchema ? asSchema(rawSchema, { strict }) : undefined
-
-  // TODO: support custom stopping criteria (like setting a flag in a tool call)
-
-  const defaultParams: Partial<types.ChatParams> | undefined =
-    schema && !functionSet.size
-      ? {
-          response_format: strict
-            ? {
-                type: 'json_schema',
-                json_schema: {
-                  name,
-                  description,
-                  strict,
-                  schema: schema.jsonSchema
-                }
-              }
-            : { type: 'json_object' }
-        }
-      : undefined
-
-  return async (chatParams) => {
-    const { messages, ...modelParams }: SetOptional<types.ChatParams, 'model'> =
-      typeof chatParams === 'string'
-        ? {
-            ...defaultParams,
-            ...params,
-            messages: [...(params?.messages ?? []), Msg.user(chatParams)]
-          }
-        : {
-            ...defaultParams,
-            ...params,
-            ...chatParams,
-            messages: [
-              ...(params?.messages ?? []),
-              ...(chatParams?.messages ?? [])
-            ]
-          }
-
-    if (!messages.length) {
-      throw new Error('AIChain error: "messages" is empty')
-    }
-
-    if (schema && injectSchemaIntoSystemMessage) {
-      const lastSystemMessageIndex = messages.findLastIndex(Msg.isSystem)
-      const lastSystemMessageContent =
-        messages[lastSystemMessageIndex]?.content!
-
-      const systemMessage = augmentSystemMessageWithJsonSchema({
-        system: lastSystemMessageContent,
-        schema: schema.jsonSchema
-      })
-
-      if (lastSystemMessageIndex >= 0) {
-        messages[lastSystemMessageIndex] = Msg.system(systemMessage!)
-      } else {
-        messages.unshift(Msg.system(systemMessage))
-      }
-    }
-
-    let numCalls = 0
-    let numErrors = 0
-
-    do {
-      ++numCalls
-
-      const response = await chatFn({
-        ...modelParams,
-        messages,
-        tools: functionSet.size ? functionSet.toolSpecs : undefined
-      })
-
-      const { message } = response
-      messages.push(message)
-
-      try {
-        if (Msg.isToolCall(message)) {
-          if (!functionSet.size) {
-            throw new AbortError('No functions provided to handle tool call')
-          }
-
-          // Synchronously validate that all tool calls reference valid functions
-          for (const toolCall of message.tool_calls) {
-            const func = functionSet.get(toolCall.function.name)
-
-            if (!func) {
-              throw new Error(
-                `No function found with name ${toolCall.function.name}`
-              )
-            }
-          }
-
-          await pMap(
-            message.tool_calls,
-            async (toolCall) => {
-              const func = functionSet.get(toolCall.function.name)!
-
-              // TODO: ideally we'd differentiate between tool argument validation
-              // errors versus errors thrown from the tool implementation. Errors
-              // from the underlying tool could be things like network errors, which
-              // should be retried locally without re-calling the LLM.
-              const result = await func(toolCall.function.arguments)
-
-              const toolResult = Msg.toolResult(result, toolCall.id)
-              messages.push(toolResult)
-            },
-            {
-              concurrency: toolCallConcurrency
-            }
-          )
-        } else if (Msg.isFuncCall(message)) {
-          throw new AbortError(
-            'Function calls are not supported; expected tool call'
-          )
-        } else if (Msg.isRefusal(message)) {
-          throw new AbortError(`Model refusal: ${message.refusal}`)
-        } else if (Msg.isAssistant(message)) {
-          if (schema) {
-            return schema.parse(message.content)
-          } else {
-            return message.content as Result
-          }
-        }
-      } catch (err: any) {
-        numErrors++
-
-        if (err instanceof AbortError) {
-          throw err
-        }
-
-        console.warn(`Chain "${name}" error:`, err.message)
-
-        messages.push(
-          Msg.user(
-            `There was an error validating the response. Please check the error message and try again.\nError:\n${getErrorMessage(err)}`
-          )
-        )
-
-        if (numErrors > maxRetries) {
-          throw new Error(
-            `Chain ${name} failed after ${numErrors} errors: ${err.message}`,
-            {
-              cause: err
-            }
-          )
-        }
-      }
-    } while (numCalls < maxCalls)
-
-    throw new Error(
-      `Chain "${name}" aborted after reaching max ${maxCalls} calls`
-    )
-  }
-}
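For context, a condensed sketch of how the now-removed `createAIChain` was typically driven, based on the deleted Dexter example earlier in this diff (the query string here is illustrative, and `SERPER_API_KEY` / `OPENAI_API_KEY` are assumed to be set):

```ts
import 'dotenv/config'

import { createAIChain, Msg } from '@agentic/core'
import { SerperClient } from '@agentic/serper'
import { ChatModel } from '@dexaai/dexter'

const serper = new SerperClient()
const chatModel = new ChatModel({ params: { model: 'gpt-4o-mini' } })

// createAIChain wraps chatFn: it calls the model up to maxCalls times,
// resolves any tool calls against `tools`, retries validation failures up
// to maxRetries, and returns the final (optionally schema-validated) answer.
const chain = createAIChain({
  name: 'search_news',
  chatFn: chatModel.run.bind(chatModel),
  tools: [serper],
  params: {
    messages: [Msg.system('Be as concise as possible. Cite your sources.')]
  }
})

const result = await chain('Summarize the latest technology news stories.')
console.log(result)
```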
@@ -1,19 +0,0 @@
-import type * as types from './types'
-import { type AIChainParams, createAIChain } from './create-ai-chain'
-
-export type ExtractObjectParams<Result extends types.AIChainResult = string> =
-  types.Simplify<
-    types.SetRequired<
-      Omit<AIChainParams<Result>, 'tools' | 'toolCallConcurrency' | 'params'>,
-      'schema'
-    > & {
-      params: types.SetRequired<Partial<types.ChatParams>, 'messages'>
-    }
-  >
-
-export function extractObject<Result extends types.AIChainResult = string>(
-  args: ExtractObjectParams<Result>
-): Promise<Result> {
-  const chain = createAIChain(args)
-  return chain() as Promise<Result>
-}
@@ -1,9 +1,7 @@
 export * from './ai-function-set'
-export * from './create-ai-chain'
 export * from './create-ai-function'
 export * from './echo'
 export * from './errors'
-export * from './extract-object'
 export * from './fns'
 export * from './message'
 export * from './parse-structured-output'
@@ -89,7 +89,13 @@ export type ChatMessageContentPart =
 /** Narrowed OpenAI Message types. */
 export namespace Msg {
   /** Possible roles for a message. */
-  export type Role = 'system' | 'user' | 'assistant' | 'function' | 'tool'
+  export type Role =
+    | 'system'
+    | 'developer'
+    | 'user'
+    | 'assistant'
+    | 'function'
+    | 'tool'

   export namespace Call {
     /**

@@ -127,6 +133,13 @@ export namespace Msg {
     name?: string
   }

+  /** Message with text content for the developer. */
+  export type Developer = {
+    role: 'developer'
+    content: string
+    name?: string
+  }
+
   /** Message with text content from the user. */
   export type User = {
     role: 'user'

@@ -199,6 +212,24 @@ export namespace Msg {
     }
   }

+  /** Create a developer message. Cleans indentation and newlines by default. */
+  export function developer(
+    content: string,
+    opts?: {
+      /** Custom name for the message. */
+      name?: string
+      /** Whether to clean extra newlines and indentation. Defaults to true. */
+      cleanContent?: boolean
+    }
+  ): Msg.Developer {
+    const { name, cleanContent = true } = opts ?? {}
+    return {
+      role: 'developer',
+      content: cleanContent ? cleanStringForModel(content) : content,
+      ...(name ? { name } : {})
+    }
+  }
+
   /** Create a user message. Cleans indentation and newlines by default. */
   export function user(
     content: string,

@@ -353,6 +384,10 @@ export namespace Msg {
   export function isSystem(message: Msg): message is Msg.System {
     return message.role === 'system'
   }
+  /** Check if a message is a developer message. */
+  export function isDeveloper(message: Msg): message is Msg.Developer {
+    return message.role === 'developer'
+  }
   /** Check if a message is a user message. */
   export function isUser(message: Msg): message is Msg.User {
     return message.role === 'user'

@@ -384,6 +419,7 @@ export namespace Msg {

   /** Narrow a ChatModel.Message to a specific type. */
   export function narrow(message: Msg.System): Msg.System
+  export function narrow(message: Msg.Developer): Msg.Developer
   export function narrow(message: Msg.User): Msg.User
   export function narrow(message: Msg.Assistant): Msg.Assistant
   export function narrow(message: Msg.Assistant): Msg.Refusal

@@ -395,6 +431,7 @@ export namespace Msg {
     message: Msg
   ):
     | Msg.System
+    | Msg.Developer
     | Msg.User
     | Msg.Assistant
     | Msg.Refusal

@@ -405,6 +442,9 @@ export namespace Msg {
     if (isSystem(message)) {
       return message
     }
+    if (isDeveloper(message)) {
+      return message
+    }
     if (isUser(message)) {
       return message
     }
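A short sketch of the new `developer` helpers in use (assuming `Msg` continues to be re-exported from `@agentic/core` as before):

```ts
import { Msg } from '@agentic/core'

// Msg.developer() mirrors Msg.system(): it cleans indentation and newlines
// by default and produces a { role: 'developer' } message for models that
// expect developer messages in place of system messages.
const msg = Msg.developer('You are a helpful assistant. Be concise.')
// => { role: 'developer', content: 'You are a helpful assistant. Be concise.' }

console.log(Msg.isDeveloper(msg)) // true
console.log(Msg.isSystem(msg)) // false
```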
@@ -140,6 +140,9 @@ test('pruneEmptyDeep', () => {

 test(
   'throttleKy should rate-limit requests to ky properly',
+  {
+    timeout: 60_000
+  },
   async () => {
     const interval = 1000
     const throttle = pThrottle({

@@ -166,8 +169,5 @@ test(
       expect(duration >= interval - interval / 5).toBeTruthy()
     }
-  },
-  {
-    timeout: 60_000
-  }
+  }
 )
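The test change above moves the per-test timeout out of the trailing argument and into Vitest's options object, which is accepted as the second parameter, e.g.:

```ts
import { expect, test } from 'vitest'

// Per-test options (timeout, retry, ...) can be passed as the second argument;
// the older form passed the timeout after the test callback instead.
test('slow network call', { timeout: 60_000 }, async () => {
  expect(1 + 1).toBe(2)
})
```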
@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"
@@ -35,11 +35,11 @@
     "@agentic/core": "workspace:*"
   },
   "peerDependencies": {
-    "@genkit-ai/ai": "^0.5.16"
+    "genkit": "^1.0.4"
   },
   "devDependencies": {
     "@agentic/tsconfig": "workspace:*",
-    "@genkit-ai/ai": "^0.5.16"
+    "genkit": "^1.0.4"
   },
   "publishConfig": {
     "access": "public"
@@ -1,10 +1,12 @@
 import { EchoAITool } from '@agentic/core'
+import { Genkit } from 'genkit'
 import { describe, expect, test } from 'vitest'

 import { createGenkitTools } from './genkit'

 describe('genkit', () => {
   test('createGenkitTools', () => {
-    expect(createGenkitTools(new EchoAITool())).toHaveLength(1)
+    const genkit = new Genkit()
+    expect(createGenkitTools(genkit, new EchoAITool())).toHaveLength(1)
   })
 })
@@ -1,16 +1,19 @@
+import type { Genkit } from 'genkit'
 import { type AIFunctionLike, AIFunctionSet } from '@agentic/core'
-import { defineTool } from '@genkit-ai/ai'
 import { z } from 'zod'

 /**
  * Converts a set of Agentic stdlib AI functions to an array of Genkit-
  * compatible tools.
  */
-export function createGenkitTools(...aiFunctionLikeTools: AIFunctionLike[]) {
+export function createGenkitTools(
+  genkit: Genkit,
+  ...aiFunctionLikeTools: AIFunctionLike[]
+) {
   const fns = new AIFunctionSet(aiFunctionLikeTools)

   return fns.map((fn) =>
-    defineTool(
+    genkit.defineTool(
       {
         name: fn.spec.name,
         description: fn.spec.description,
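Callers now pass the Genkit instance first, and each Agentic function is registered through `genkit.defineTool` on that instance. A minimal sketch, mirroring the updated examples above:

```ts
import { createGenkitTools } from '@agentic/genkit'
import { WeatherClient } from '@agentic/stdlib'
import { genkit } from 'genkit'
import { openAI } from 'genkitx-openai'

const ai = genkit({ plugins: [openAI()] })

// Each tool is defined on the passed-in Genkit instance via ai.defineTool().
const tools = createGenkitTools(ai, new WeatherClient())
```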
@@ -32,9 +32,9 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
+    "ky": "^1.7.5",
     "octokit": "^4.0.2",
-    "p-throttle": "^6.2.0"
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -44,7 +44,7 @@
     "access": "public"
   },
   "dependencies": {
-    "ky": "^1.5.0",
-    "p-throttle": "^6.1.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   }
 }
@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -33,10 +33,10 @@
   "dependencies": {
     "@agentic/core": "workspace:*",
     "@nangohq/node": "^0.42.2",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0",
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0",
     "twitter-api-sdk": "^1.2.1",
-    "type-fest": "^4.21.0"
+    "type-fest": "^4.35.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"
@@ -26,7 +26,7 @@ export function validateNangoConnectionOAuthScopes({
   scopes: string[]
 }) {
   const connectionScopes = new Set<string>(
-    connection.credentials.raw.scope.split(' ')
+    (connection.credentials as any).raw.scope.split(' ')
   )
   const missingScopes = new Set<string>()

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0",
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0",
     "wikibase-sdk": "^10.0.3"
   },
   "peerDependencies": {

@@ -32,8 +32,8 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2",
-    "p-throttle": "^6.2.0"
+    "ky": "^1.7.5",
+    "p-throttle": "^7.0.0"
   },
   "peerDependencies": {
     "zod": "^3.23.8"

@@ -32,7 +32,7 @@
   },
   "dependencies": {
     "@agentic/core": "workspace:*",
-    "ky": "^1.7.2"
+    "ky": "^1.7.5"
   },
   "peerDependencies": {
     "zod": "^3.23.8"
pnpm-lock.yaml (5744 lines changed; diff too large to display)