Mirror of https://github.com/transitive-bullshit/chatgpt-api
Branch: pull/715/head
Parent commit: 47e9284e73
Commit: 99e86f12e5
@@ -24,15 +24,19 @@ export async function createRequestForOpenAPIOperation({
     `Unexpected origin adapter type: "${deployment.originAdapter.type}"`
   )
 
-  const tempInitialRequest = request.clone()
-
   let incomingRequestParams: Record<string, any> = {}
 
   if (request.method === 'GET') {
     incomingRequestParams = Object.fromEntries(
-      new URL(tempInitialRequest.url).searchParams.entries()
+      new URL(request.url).searchParams.entries()
     )
+
+    // console.log('debug', {
+    //   url: request.url,
+    //   incomingRequestParams,
+    //   searchParams: new URL(request.url).searchParams
+    // })
   } else if (request.method === 'POST') {
-    incomingRequestParams = (await tempInitialRequest.json()) as Record<
+    incomingRequestParams = (await request.clone().json()) as Record<
       string,
       any
     >
@@ -62,11 +66,14 @@ export async function createRequestForOpenAPIOperation({
   )
 
   const headers: Record<string, string> = {}
+  for (const [key, value] of request.headers.entries()) {
+    headers[key] = value
+  }
 
   if (headerParams.length > 0) {
     for (const [key] of headerParams) {
-      headers[key] =
-        (tempInitialRequest.headers.get(key) as string) ??
-        incomingRequestParams[key]
+      headers[key] =
+        (request.headers.get(key) as string) ?? incomingRequestParams[key]
     }
   }
 
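The two hunks above drop the up-front `tempInitialRequest = request.clone()` and instead clone only at the point where the POST body is actually read; GET requests never need a clone, since only `request.url` and `request.headers` are inspected. A minimal sketch of the Fetch API behavior this relies on (the helper name below is illustrative, not part of the diff):

    // Sketch: a Request body is single-use, so read a clone and leave the
    // original request's body intact for any later forwarding.
    async function readJsonParams(request: Request): Promise<Record<string, any>> {
      // Calling `request.json()` directly would consume the body; a later
      // `fetch(request)` or second read would then fail with "body already used".
      return (await request.clone().json()) as Record<string, any>
    }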
@@ -24,7 +24,7 @@ export async function fetchCache(
 
   if (cacheKey) {
     if (response.headers.has('Cache-Control')) {
-      // cache will respect response headers
+      // Note that cloudflare's `cache` should respect response headers.
       ctx.waitUntil(
         cache.put(cacheKey, response.clone()).catch((err) => {
           console.warn('cache put error', cacheKey, err)
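For context, the hunk above is the response-caching half of `fetchCache`. A rough sketch of how such a helper fits together on Cloudflare Workers follows; the cache-lookup half and the exact signature are assumptions, only the `ctx.waitUntil(cache.put(...))` part is taken from the diff:

    // Rough sketch only; the real fetchCache signature may differ.
    export async function fetchCacheSketch(
      ctx: { waitUntil: (promise: Promise<unknown>) => void },
      {
        cacheKey,
        fetchResponse
      }: {
        cacheKey?: Request
        fetchResponse: () => Promise<Response>
      }
    ): Promise<Response> {
      // Cloudflare Workers expose the default edge cache as `caches.default`.
      const cache = (caches as any).default as Cache

      if (cacheKey) {
        const cached = await cache.match(cacheKey)
        if (cached) return cached
      }

      const response = await fetchResponse()

      if (cacheKey && response.headers.has('Cache-Control')) {
        // Cache in the background: `cache.put` respects the response's caching
        // headers, and the client response should not block on it.
        ctx.waitUntil(
          cache.put(cacheKey, response.clone()).catch((err) => {
            console.warn('cache put error', cacheKey, err)
          })
        )
      }

      return response
    }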
@@ -3,20 +3,23 @@ import contentType from 'fast-content-type-parse'
 
 import { normalizeUrl } from './normalize-url'
 
+// TODO: what is a reasonable upper bound for hashing the POST body size?
+const MAX_POST_BODY_SIZE_BYTES = 10_000
+
 export async function getRequestCacheKey(
   request: Request
-): Promise<Request | null> {
+): Promise<Request | undefined> {
   try {
     const pragma = request.headers.get('pragma')
     if (pragma === 'no-cache') {
-      return null
+      return
     }
 
     const cacheControl = request.headers.get('cache-control')
     if (cacheControl) {
       const directives = new Set(cacheControl.split(',').map((s) => s.trim()))
       if (directives.has('no-store') || directives.has('no-cache')) {
-        return null
+        return
       }
     }
 
@@ -28,8 +31,7 @@ export async function getRequestCacheKey(
         request.headers.get('content-length') ?? '0'
       )
 
-      // TODO: what is a reasonable upper bound for hashing the POST body size?
-      if (contentLength && contentLength < 10_000) {
+      if (contentLength && contentLength < MAX_POST_BODY_SIZE_BYTES) {
         const { type } = contentType.safeParse(
           request.headers.get('content-type') || 'application/octet-stream'
         )
@@ -48,7 +50,7 @@ export async function getRequestCacheKey(
         // TODO
         // const bodyBuffer = await request.clone().arrayBuffer()
         // hash = await sha256.fromBuffer(bodyBuffer)
-        return null
+        return
       }
 
       const cacheUrl = new URL(request.url)
@@ -65,7 +67,7 @@ export async function getRequestCacheKey(
         return newReq
       }
 
-      return null
+      return
     } else if (request.method === 'GET' || request.method === 'HEAD') {
       const url = request.url
       const normalizedUrl = normalizeUrl(url)
@@ -82,7 +84,7 @@ export async function getRequestCacheKey(
     return normalizeRequestHeaders(new Request(request))
   } catch (err) {
     console.error('error computing cache key', request.method, request.url, err)
-    return null
+    return
   }
 }
 
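Taken together, these hunks change `getRequestCacheKey` to return `undefined` instead of `null` for uncacheable requests and hoist the body-size limit into `MAX_POST_BODY_SIZE_BYTES`. A simplified sketch of the idea the function implements (the POST-body hashing is still a TODO in the diff itself, so the hashing step below is an assumption, and URL/header normalization is omitted):

    // Simplified sketch, not the project's implementation.
    async function sketchRequestCacheKey(
      request: Request
    ): Promise<Request | undefined> {
      if (request.method === 'GET' || request.method === 'HEAD') {
        // GET/HEAD requests can key directly on the (normalized) URL.
        return new Request(request.url, { method: 'GET' })
      }

      if (request.method === 'POST') {
        const body = await request.clone().arrayBuffer()
        if (body.byteLength > 10_000) return undefined // too large to hash cheaply

        // The Cache API only accepts GET requests as keys, so fold a hash of
        // the POST body into the key URL instead.
        const digest = await crypto.subtle.digest('SHA-256', body)
        const hash = [...new Uint8Array(digest)]
          .map((b) => b.toString(16).padStart(2, '0'))
          .join('')
        const keyUrl = new URL(request.url)
        keyUrl.searchParams.set('x-body-sha256', hash)
        return new Request(keyUrl.toString(), { method: 'GET' })
      }

      // Anything else (PUT, DELETE, ...) is treated as uncacheable.
      return undefined
    }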
@@ -3,6 +3,8 @@ import { assert, parseZodSchema } from '@agentic/platform-core'
 
 import type { Context } from './lib/types'
 import { type AgenticEnv, envSchema } from './lib/env'
+import { fetchCache } from './lib/fetch-cache'
+import { getRequestCacheKey } from './lib/get-request-cache-key'
 import { handleOptions } from './lib/handle-options'
 import { resolveOriginRequest } from './lib/resolve-origin-request'
 
@@ -47,12 +49,12 @@ export default {
       apiKey: env.AGENTIC_API_KEY
     })
 
-    const ctx: Context = {
-      ...inputCtx,
-      req: inputReq,
-      env,
-      client
-    }
+    // NOTE: We have to mutate the given ExecutionContext because spreading it
+    // into a new object causes its methods to be `undefined`.
+    const ctx = inputCtx as Context
+    ctx.req = inputReq
+    ctx.env = env
+    ctx.client = client
 
     try {
       if (inputReq.method === 'OPTIONS') {
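The NOTE in this hunk refers to a general JavaScript pitfall: object spread copies only own enumerable properties, so methods defined on a class prototype, like `ExecutionContext.waitUntil`, disappear from the copy. A standalone illustration (not the worker's code):

    class FakeExecutionContext {
      // Defined on the prototype, like ExecutionContext.waitUntil in Workers.
      waitUntil(_promise: Promise<unknown>): void {}
    }

    const inputCtx = new FakeExecutionContext()
    const spreadCtx = { ...inputCtx, extra: true }

    console.log(typeof inputCtx.waitUntil) // "function"
    console.log(typeof (spreadCtx as any).waitUntil) // "undefined": lost by the spread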
@@ -67,22 +69,23 @@ export default {
 
       switch (resolvedOriginRequest.deployment.originAdapter.type) {
         case 'openapi':
+        case 'raw': {
           assert(
             resolvedOriginRequest.originRequest,
             500,
             'Origin request is required'
           )
-          originResponse = await fetch(resolvedOriginRequest.originRequest)
-          break
-
-        case 'raw':
-          assert(
-            resolvedOriginRequest.originRequest,
-            500,
-            'Origin request is required'
-          )
-          originResponse = await fetch(resolvedOriginRequest.originRequest)
+
+          const cacheKey = await getRequestCacheKey(
+            resolvedOriginRequest.originRequest
+          )
+
+          originResponse = await fetchCache(ctx, {
+            cacheKey,
+            fetchResponse: () => fetch(resolvedOriginRequest.originRequest!)
+          })
           break
+        }
 
         case 'mcp':
           throw new Error('MCP not yet supported')
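Putting the worker-side changes together: the `openapi` and `raw` adapters now share one path that asserts an origin request exists, derives a cache key for it, and fetches through the cache helper. A condensed sketch using the imports added in this commit (error handling and the `mcp` case omitted; not the full worker):

    import type { Context } from './lib/types'
    import { fetchCache } from './lib/fetch-cache'
    import { getRequestCacheKey } from './lib/get-request-cache-key'

    // Condensed sketch of the shared 'openapi'/'raw' proxy path.
    async function proxyOriginRequest(
      ctx: Context,
      originRequest: Request
    ): Promise<Response> {
      const cacheKey = await getRequestCacheKey(originRequest)
      return fetchCache(ctx, {
        cacheKey,
        fetchResponse: () => fetch(originRequest)
      })
    }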
@@ -40,7 +40,7 @@
 - share hono middleware and utils across apps/api and apps/gateway
   - or combine these together? ehhhh
 - MCP server vs REST gateway on public and internal sides
-  - **REST**: `POST gateway.agentic.so/deploymentIdentifier/toolName`
+  - **REST**: `GET/POST gateway.agentic.so/deploymentIdentifier/toolName`
     - => MCP: `MCPClient.callTool` with JSON body parameters
     - => OpenAPI: `GET/POST/ETC originUrl/toolName` operation with transformed JSON body params
   - **MCP**: `mcp.agentic.so/deploymentIdentifier/sse` MCP server?
@@ -50,9 +50,10 @@
     - => Raw HTTP: `METHOD originUrl/<pathname>` simple HTTP proxy request
 - add support for caching
 - add support for custom headers on responses
+- how to handle binary bodies and responses?
 - signed requests
 - revisit deployment identifiers so possibly be URL-friendly?
-- rename parseFaasIdentifier and move validators package into platform-types?
+- rename parseFaasIdentifier to `parseToolIdentifier` and move validators package into platform-types?
 
 ## License
 
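The REST mapping documented in the first README hunk above can be exercised directly. A hedged example follows; the deployment identifier, tool name, and parameter names are placeholders, and the real values depend on the deployed tool's schema:

    // Placeholders: replace with a real deployment identifier and tool name.
    const deploymentIdentifier = 'example-org/example-project@latest'
    const toolName = 'search'
    const gatewayUrl = `https://gateway.agentic.so/${deploymentIdentifier}/${toolName}`

    // GET variant: tool params in the query string.
    const getRes = await fetch(`${gatewayUrl}?query=hello`)

    // POST variant: tool params as a JSON body.
    const postRes = await fetch(gatewayUrl, {
      method: 'POST',
      headers: { 'content-type': 'application/json' },
      body: JSON.stringify({ query: 'hello' })
    })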