parent 47e9284e73
commit 99e86f12e5
@@ -24,15 +24,19 @@ export async function createRequestForOpenAPIOperation({
     `Unexpected origin adapter type: "${deployment.originAdapter.type}"`
   )
 
-  const tempInitialRequest = request.clone()
-
   let incomingRequestParams: Record<string, any> = {}
   if (request.method === 'GET') {
     incomingRequestParams = Object.fromEntries(
-      new URL(tempInitialRequest.url).searchParams.entries()
+      new URL(request.url).searchParams.entries()
     )
+
+    // console.log('debug', {
+    //   url: request.url,
+    //   incomingRequestParams,
+    //   searchParams: new URL(request.url).searchParams
+    // })
   } else if (request.method === 'POST') {
-    incomingRequestParams = (await tempInitialRequest.json()) as Record<
+    incomingRequestParams = (await request.clone().json()) as Record<
      string,
      any
    >
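This hunk drops the up-front `tempInitialRequest = request.clone()` and clones only at the point where the body is actually consumed. A minimal sketch of the underlying rule (illustrative, not part of the commit): a fetch `Request` body is a one-shot stream, so it must be cloned before reading if the original request will still be forwarded, while `request.url` and `request.headers` can be inspected freely.

```ts
// Illustrative sketch only (not from this commit); assumes a fetch/Workers environment.
async function readIncomingParams(request: Request): Promise<Record<string, any>> {
  if (request.method === 'GET') {
    // URL and headers can be read without consuming the body, so no clone is needed.
    return Object.fromEntries(new URL(request.url).searchParams.entries())
  }

  // Cloning before `.json()` leaves the original request body unread and still forwardable.
  return (await request.clone().json()) as Record<string, any>
}
```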
@@ -62,11 +66,14 @@ export async function createRequestForOpenAPIOperation({
   )
 
   const headers: Record<string, string> = {}
   for (const [key, value] of request.headers.entries()) {
     headers[key] = value
   }
 
   if (headerParams.length > 0) {
     for (const [key] of headerParams) {
-      headers[key] =
-        (tempInitialRequest.headers.get(key) as string) ??
-        incomingRequestParams[key]
+      headers[key] =
+        (request.headers.get(key) as string) ?? incomingRequestParams[key]
     }
   }
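The header override keeps the same precedence as before, just reading from `request` instead of the clone: a header supplied on the incoming request wins, otherwise the value falls back to the incoming request parameter of the same name. A small illustrative sketch of that precedence (the helper itself is hypothetical):

```ts
// Hypothetical helper illustrating the `??` precedence used in the hunk above.
function resolveHeaderValue(
  request: Request,
  incomingRequestParams: Record<string, any>,
  key: string
): string | undefined {
  // Prefer the header sent by the caller; fall back to a query/body parameter of the same name.
  return (request.headers.get(key) as string) ?? incomingRequestParams[key]
}
```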
@@ -24,7 +24,7 @@ export async function fetchCache(
 
   if (cacheKey) {
     if (response.headers.has('Cache-Control')) {
-      // cache will respect response headers
+      // Note that cloudflare's `cache` should respect response headers.
      ctx.waitUntil(
        cache.put(cacheKey, response.clone()).catch((err) => {
          console.warn('cache put error', cacheKey, err)
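For context, a hedged sketch of what a `fetchCache` helper along these lines can look like on Cloudflare Workers (the real signature and details may differ): `caches.default` is the Workers cache, `cache.match` serves hits, and `ctx.waitUntil` lets the `cache.put` settle after the response has been returned, with `response.clone()` so the body can be both cached and streamed to the client.

```ts
// Hedged sketch, not the actual implementation from this commit.
// Assumes Cloudflare Workers types (ExecutionContext, caches.default).
async function fetchCacheSketch(
  ctx: ExecutionContext,
  {
    cacheKey,
    fetchResponse
  }: { cacheKey?: Request; fetchResponse: () => Promise<Response> }
): Promise<Response> {
  const cache = (caches as unknown as { default: Cache }).default

  if (cacheKey) {
    // Serve a cached response if one exists for this key.
    const cached = await cache.match(cacheKey)
    if (cached) return cached
  }

  const response = await fetchResponse()

  if (cacheKey && response.headers.has('Cache-Control')) {
    // Note that cloudflare's `cache` should respect response headers.
    ctx.waitUntil(
      cache.put(cacheKey, response.clone()).catch((err) => {
        console.warn('cache put error', cacheKey, err)
      })
    )
  }

  return response
}
```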
@@ -3,20 +3,23 @@ import contentType from 'fast-content-type-parse'
 
 import { normalizeUrl } from './normalize-url'
 
+// TODO: what is a reasonable upper bound for hashing the POST body size?
+const MAX_POST_BODY_SIZE_BYTES = 10_000
+
 export async function getRequestCacheKey(
   request: Request
-): Promise<Request | null> {
+): Promise<Request | undefined> {
   try {
     const pragma = request.headers.get('pragma')
     if (pragma === 'no-cache') {
-      return null
+      return
     }
 
     const cacheControl = request.headers.get('cache-control')
     if (cacheControl) {
       const directives = new Set(cacheControl.split(',').map((s) => s.trim()))
       if (directives.has('no-store') || directives.has('no-cache')) {
-        return null
+        return
       }
     }
 
@@ -28,8 +31,7 @@ export async function getRequestCacheKey(
        request.headers.get('content-length') ?? '0'
      )
 
-      // TODO: what is a reasonable upper bound for hashing the POST body size?
-      if (contentLength && contentLength < 10_000) {
+      if (contentLength && contentLength < MAX_POST_BODY_SIZE_BYTES) {
        const { type } = contentType.safeParse(
          request.headers.get('content-type') || 'application/octet-stream'
        )
@@ -48,7 +50,7 @@ export async function getRequestCacheKey(
        // TODO
        // const bodyBuffer = await request.clone().arrayBuffer()
        // hash = await sha256.fromBuffer(bodyBuffer)
-        return null
+        return
      }
 
      const cacheUrl = new URL(request.url)
@@ -65,7 +67,7 @@ export async function getRequestCacheKey(
        return newReq
      }
 
-      return null
+      return
    } else if (request.method === 'GET' || request.method === 'HEAD') {
      const url = request.url
      const normalizedUrl = normalizeUrl(url)
@@ -82,7 +84,7 @@ export async function getRequestCacheKey(
    return normalizeRequestHeaders(new Request(request))
  } catch (err) {
    console.error('error computing cache key', request.method, request.url, err)
-    return null
+    return
  }
}
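The early returns above implement a standard client opt-out: `Pragma: no-cache`, or a `Cache-Control` header carrying `no-store` or `no-cache`, means no cache key is produced (now signalled with `undefined` instead of `null`). The same check in isolation, as an illustrative sketch:

```ts
// Illustrative sketch of the opt-out check above; not a helper exported by the gateway.
function shouldBypassCache(request: Request): boolean {
  if (request.headers.get('pragma') === 'no-cache') {
    return true
  }

  const cacheControl = request.headers.get('cache-control')
  if (!cacheControl) {
    return false
  }

  // Split the header into individual directives, e.g. "no-cache, max-age=0".
  const directives = new Set(cacheControl.split(',').map((s) => s.trim()))
  return directives.has('no-store') || directives.has('no-cache')
}
```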
@@ -3,6 +3,8 @@ import { assert, parseZodSchema } from '@agentic/platform-core'
 
 import type { Context } from './lib/types'
 import { type AgenticEnv, envSchema } from './lib/env'
+import { fetchCache } from './lib/fetch-cache'
+import { getRequestCacheKey } from './lib/get-request-cache-key'
 import { handleOptions } from './lib/handle-options'
 import { resolveOriginRequest } from './lib/resolve-origin-request'
 
@@ -47,12 +49,12 @@ export default {
      apiKey: env.AGENTIC_API_KEY
    })
 
-    const ctx: Context = {
-      ...inputCtx,
-      req: inputReq,
-      env,
-      client
-    }
+    // NOTE: We have to mutate the given ExecutionContext because spreading it
+    // into a new object causes its methods to be `undefined`.
+    const ctx = inputCtx as Context
+    ctx.req = inputReq
+    ctx.env = env
+    ctx.client = client
 
    try {
      if (inputReq.method === 'OPTIONS') {
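The NOTE matches how object spread works in JavaScript: `{ ...obj }` copies only own enumerable properties, so methods that are defined on a prototype (or otherwise not own enumerable properties, as with the Workers `ExecutionContext`) do not survive the copy. A standalone illustration of the pitfall (not from the commit):

```ts
// Standalone illustration: prototype methods are lost when an instance is spread.
class FakeExecutionContext {
  waitUntil(promise: Promise<unknown>): void {
    void promise
  }
}

const original = new FakeExecutionContext()
const copy = { ...original }

console.log(typeof original.waitUntil) // "function"
console.log(typeof (copy as { waitUntil?: unknown }).waitUntil) // "undefined"
```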
@@ -67,22 +69,23 @@ export default {
 
      switch (resolvedOriginRequest.deployment.originAdapter.type) {
        case 'openapi':
+        case 'raw': {
          assert(
            resolvedOriginRequest.originRequest,
            500,
            'Origin request is required'
          )
-          originResponse = await fetch(resolvedOriginRequest.originRequest)
-          break
 
-        case 'raw':
-          assert(
-            resolvedOriginRequest.originRequest,
-            500,
-            'Origin request is required'
-          )
-          originResponse = await fetch(resolvedOriginRequest.originRequest)
+          const cacheKey = await getRequestCacheKey(
+            resolvedOriginRequest.originRequest
+          )
+
+          originResponse = await fetchCache(ctx, {
+            cacheKey,
+            fetchResponse: () => fetch(resolvedOriginRequest.originRequest!)
+          })
          break
+        }
 
        case 'mcp':
          throw new Error('MCP not yet supported')
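The non-null assertion in `fetch(resolvedOriginRequest.originRequest!)` is presumably needed because TypeScript does not carry narrowing from the earlier `assert(...)` into a callback: a property access narrowed in the enclosing scope reverts to its declared type inside a later-invoked closure. A self-contained sketch of the pattern (the `assert` stand-in and interface below are assumptions, not the platform's actual code):

```ts
// Minimal stand-in for the platform's assert helper (signature assumed).
function assert(value: unknown, status: number, message: string): asserts value {
  if (!value) {
    throw Object.assign(new Error(message), { status })
  }
}

interface ResolvedOriginRequestSketch {
  originRequest?: Request
}

function buildFetcher(resolved: ResolvedOriginRequestSketch): () => Promise<Response> {
  assert(resolved.originRequest, 500, 'Origin request is required')

  // Inside the closure, `resolved.originRequest` is typed `Request | undefined` again,
  // so either use `!` (as the commit does) or capture it in a local const first.
  return () => fetch(resolved.originRequest!)
}
```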
@@ -40,7 +40,7 @@
 - share hono middleware and utils across apps/api and apps/gateway
   - or combine these together? ehhhh
 - MCP server vs REST gateway on public and internal sides
-  - **REST**: `POST gateway.agentic.so/deploymentIdentifier/toolName`
+  - **REST**: `GET/POST gateway.agentic.so/deploymentIdentifier/toolName`
     - => MCP: `MCPClient.callTool` with JSON body parameters
     - => OpenAPI: `GET/POST/ETC originUrl/toolName` operation with transformed JSON body params
   - **MCP**: `mcp.agentic.so/deploymentIdentifier/sse` MCP server?
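To make the REST mapping above concrete, a hypothetical call against the gateway (the deployment identifier, tool name, and body are placeholders, not real endpoints):

```ts
// Hypothetical example of the REST mapping above; identifiers and body are placeholders.
async function callGatewayTool(): Promise<unknown> {
  const res = await fetch(
    'https://gateway.agentic.so/example-deployment-identifier/exampleTool',
    {
      method: 'POST',
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify({ query: 'example input' })
    }
  )

  return res.json()
}
```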
@@ -50,9 +50,10 @@
     - => Raw HTTP: `METHOD originUrl/<pathname>` simple HTTP proxy request
 - add support for caching
 - add support for custom headers on responses
 - how to handle binary bodies and responses?
 - signed requests
 - revisit deployment identifiers to possibly be URL-friendly?
-- rename parseFaasIdentifier and move validators package into platform-types?
+- rename parseFaasIdentifier to `parseToolIdentifier` and move validators package into platform-types?
 
 ## License