feat: tiny kitty bean toes

pull/715/head
Travis Fischer 2025-06-01 02:08:54 +07:00
parent 1a4e697f0f
commit bb65d09ab0
10 changed files with 38 additions and 28 deletions

View file

@@ -53,7 +53,7 @@ export function registerV1ConsumersRefreshConsumerToken(
   ;[consumer] = await db
     .update(schema.consumers)
     .set({
-      token: createConsumerToken()
+      token: await createConsumerToken()
     })
    .where(eq(schema.consumers.id, consumer.id))
    .returning()

View file

@@ -80,7 +80,7 @@ export function registerV1DeploymentsCreateDeployment(
   const projectId = project.id

   // TODO: investigate better short hash generation
-  const hash = sha256().slice(0, 8)
+  const hash = (await sha256()).slice(0, 8)
   const deploymentIdentifier = `${project.identifier}@${hash}`
   assert(
     validators.deploymentIdentifier(deploymentIdentifier),
@@ -111,6 +111,8 @@ export function registerV1DeploymentsCreateDeployment(
   // - origin API base URL
   // - origin adapter OpenAPI or MCP specs
   // - tool definitions
+  // TODO: fix types
   const agenticProjectConfig = await resolveAgenticProjectConfig(body, {
     label: `deployment "${deploymentIdentifier}"`,
     logger

View file

@@ -61,7 +61,7 @@ export function registerV1ProjectsCreateProject(
       identifier,
       teamId: teamMember?.teamId,
       userId: user.id,
-      _secret: sha256()
+      _secret: await sha256()
     })
     .returning()
   assert(project, 500, `Failed to create project "${body.name}"`)

View file

@@ -148,7 +148,7 @@ export async function upsertConsumer(
       userId,
       projectId,
       deploymentId,
-      token: createConsumerToken(),
+      token: await createConsumerToken(),
       _stripeCustomerId: stripeCustomer.id
     })
   }

View file

@@ -1,5 +1,7 @@
 import { sha256 } from '@agentic/platform-core'

-export function createConsumerToken(): string {
-  return sha256().slice(0, 24)
+export async function createConsumerToken(): Promise<string> {
+  const hash = await sha256()
+  return hash.slice(0, 24)
 }

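Every call site in this commit now awaits sha256(), which suggests the helper moved onto an async digest primitive. Below is a minimal sketch of what such a helper could look like on the Web Crypto API; the actual @agentic/platform-core implementation is not shown in this diff, and the random default input plus hex encoding are assumptions.

// Hypothetical sketch only; not the real @agentic/platform-core implementation.
// With no argument it hashes a random UUID, matching call sites like sha256()
// that just need an unguessable value; with a string it hashes that string,
// matching call sites like sha256(bodyString).
export async function sha256(
  input: string = crypto.randomUUID()
): Promise<string> {
  const data = new TextEncoder().encode(input)
  const digest = await crypto.subtle.digest('SHA-256', data)

  // Hex-encode the 32-byte digest.
  return [...new Uint8Array(digest)]
    .map((byte) => byte.toString(16).padStart(2, '0'))
    .join('')
}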
View file

@@ -29,7 +29,7 @@ export async function getUniqueNamespace(
   namespace?: string,
   { label = 'Namespace' }: { label?: string } = {}
 ) {
-  namespace ??= `${label}_${sha256().slice(0, 24)}`
+  namespace ??= `${label}_${(await sha256()).slice(0, 24)}`
   namespace = namespace
     .replaceAll(/[^a-zA-Z0-9_-]/g, '')
     .toLowerCase()
@@ -48,7 +48,7 @@ export async function getUniqueNamespace(
      throw err
    }

-    const suffix = sha256().slice(0, 8)
+    const suffix = (await sha256()).slice(0, 8)
    currentNamespace = `${namespace.slice(0, schema.namespaceMaxLength - 1 - suffix.length)}${suffix}`
  }
 } while (true)

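To make the suffix truncation above concrete: with an 8-character suffix and an assumed schema.namespaceMaxLength of 64 (the real value is not visible in this diff), the base namespace is cut down so the regenerated candidate always fits the limit.

// Worked example; namespaceMaxLength === 64 is an assumption, not from this diff.
import { sha256 } from '@agentic/platform-core'

const namespaceMaxLength = 64
const base = 'some-existing-namespace-that-collided-and-needs-a-random-suffix'
const suffix = (await sha256()).slice(0, 8) // 8 hex chars, e.g. '3f9c1a2b'

// 64 - 1 - 8 = 55 characters of the base are kept, so base + suffix is 63 chars.
const candidate = `${base.slice(0, namespaceMaxLength - 1 - suffix.length)}${suffix}`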
View file

@@ -36,6 +36,7 @@
     "@hono/zod-validator": "catalog:",
     "@modelcontextprotocol/sdk": "catalog:",
     "eventid": "catalog:",
+    "fast-content-type-parse": "^3.0.0",
     "hono": "catalog:",
     "type-fest": "catalog:"
   },

View file

@@ -8,7 +8,7 @@ export async function fetchCache(
     cacheKey,
     fetchResponse
   }: {
-    cacheKey?: string
+    cacheKey?: Request
     fetchResponse: () => Promise<Response>
   }
 ): Promise<Response> {

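Only the cacheKey type changes here (string to Request); the body of fetchCache is not part of this diff. As a hedged sketch, a Request-keyed wrapper over the Cache API (for example Cloudflare Workers' caches.default) might behave roughly as follows; the names and caching policy below are illustrative, not the actual implementation.

// Illustrative sketch, not the real fetchCache body.
async function fetchCacheSketch({
  cacheKey,
  fetchResponse
}: {
  cacheKey?: Request
  fetchResponse: () => Promise<Response>
}): Promise<Response> {
  if (!cacheKey) {
    // No stable key (e.g. uncacheable body) => always hit the origin.
    return fetchResponse()
  }

  // Cloudflare Workers expose caches.default; fall back to a named cache elsewhere.
  const cache: Cache = (caches as any).default ?? (await caches.open('agentic'))

  const cached = await cache.match(cacheKey)
  if (cached) {
    return cached
  }

  const response = await fetchResponse()
  if (response.ok) {
    // Cache a clone so the original body can still be streamed to the caller.
    await cache.put(cacheKey, response.clone())
  }

  return response
}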
View file

@@ -1,10 +1,11 @@
-import contentType from 'content-type'
-import stableJsonStringify from 'fast-json-stable-stringify'
+import { hashObject, sha256 } from '@agentic/platform-core'
+import contentType from 'fast-content-type-parse'

 import { normalizeUrl } from './normalize-url'
-import * as sha256 from './sha256'

-export async function getFaasRequestCacheKey(request) {
+export async function getRequestCacheKey(
+  request: Request
+): Promise<Request | null> {
   try {
     const pragma = request.headers.get('pragma')
     if (pragma === 'no-cache') {
@@ -23,34 +24,35 @@ export async function getFaasRequestCacheKey(request) {
     // useful for debugging since getting all the headers is awkward
     // console.log(Object.fromEntries(request.headers.entries()))

-    const contentLength = parseInt(request.headers.get('content-length'))
+    const contentLength = Number.parseInt(
+      request.headers.get('content-length') ?? '0'
+    )

     // TODO: what is a reasonable upper bound for hashing the POST body size?
-    if (contentLength && contentLength < 10000) {
-      const ct = contentType.parse(
+    if (contentLength && contentLength < 10_000) {
+      const { type } = contentType.safeParse(
         request.headers.get('content-type') || 'application/octet-stream'
       )
-      const type = ct && ct.type
       let hash

       // TODO: gracefully handle content-encoding compression
       // TODO: more robust content-type detection
-      if (type && type.indexOf('json') >= 0) {
-        const bodyJson = await request.clone().json()
-        const bodyString = stableJsonStringify(bodyJson)
-        hash = await sha256.fromString(bodyString)
-      } else if (type && type.indexOf('text/') >= 0) {
+      if (type?.includes('json')) {
+        const bodyJson: any = await request.clone().json()
+        hash = hashObject(bodyJson)
+      } else if (type?.includes('text/')) {
         const bodyString = await request.clone().text()
-        hash = await sha256.fromString(bodyString)
+        hash = await sha256(bodyString)
       } else {
-        const bodyBuffer = await request.clone().arrayBuffer()
-        hash = await sha256.fromBuffer(bodyBuffer)
+        // TODO
+        // const bodyBuffer = await request.clone().arrayBuffer()
+        // hash = await sha256.fromBuffer(bodyBuffer)
+        return null
       }

       const cacheUrl = new URL(request.url)
-      cacheUrl.pathname = cacheUrl.pathname + '/' + hash
+      cacheUrl.searchParams.set('x-agentic-cache-key', hash)
       const normalizedUrl = normalizeUrl(cacheUrl.toString())

       const newReq = normalizeRequestHeaders(
@@ -86,7 +88,7 @@ export async function getFaasRequestCacheKey(request) {
 const requestHeaderWhitelist = new Set(['cache-control'])

-function normalizeRequestHeaders(request) {
+function normalizeRequestHeaders(request: Request) {
   const headers = Object.fromEntries(request.headers.entries())
   const keys = Object.keys(headers)

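A hedged sketch of how the renamed getRequestCacheKey might be wired into a fetch handler together with fetchCache; the actual gateway wiring and the full fetchCache signature are outside this diff, so the call shape below is assumed from the visible parts.

// Illustrative wiring only; assumes getRequestCacheKey and fetchCache are imported
// from their respective modules, and that fetchCache accepts the options shown above.
async function handle(request: Request): Promise<Response> {
  // null means "do not cache" (no-cache pragma, oversized or opaque body, etc.).
  const cacheKey = await getRequestCacheKey(request)

  return fetchCache({
    cacheKey: cacheKey ?? undefined,
    fetchResponse: () => fetch(request)
  })
}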
View file

@@ -369,6 +369,9 @@ importers:
       eventid:
         specifier: 'catalog:'
         version: 2.0.1
+      fast-content-type-parse:
+        specifier: ^3.0.0
+        version: 3.0.0
       hono:
         specifier: 'catalog:'
         version: 4.7.10