Mirror of https://github.com/transitive-bullshit/chatgpt-api
Branch: pull/715/head
Parent: a47acd376d
Commit: 5ad56e6067
@@ -24,9 +24,6 @@ export async function getRequestCacheKey(
     }
   }

   if (request.method === 'POST' || request.method === 'PUT') {
-    // useful for debugging since getting all the headers is awkward
-    // console.log(Object.fromEntries(request.headers.entries()))
-
     const contentLength = Number.parseInt(
       request.headers.get('content-length') ?? '0'
     )
@@ -35,10 +32,7 @@ export async function getRequestCacheKey(
     const { type } = contentType.safeParse(
       request.headers.get('content-type') || 'application/octet-stream'
     )
-    let hash
+    let hash: string

-    // TODO: gracefully handle content-encoding compression
-    // TODO: more robust content-type detection
-
     if (type.includes('json')) {
       const bodyJson: any = await request.clone().json()
@@ -47,16 +41,20 @@ export async function getRequestCacheKey(
       const bodyString = await request.clone().text()
       hash = await sha256(bodyString)
     } else {
-      // TODO
-      // const bodyBuffer = await request.clone().arrayBuffer()
-      // hash = await sha256.fromBuffer(bodyBuffer)
-      return
+      const bodyBuffer = await request.clone().arrayBuffer()
+      hash = await sha256(bodyBuffer)
     }

     const cacheUrl = new URL(request.url)
     cacheUrl.searchParams.set('x-agentic-cache-key', hash)
     const normalizedUrl = normalizeUrl(cacheUrl.toString())

+    // Convert POST and PUT requests to GET with a query param containing
+    // a hash of the request body. This enables us to cache these requests
+    // more easily, since we want to move the "cacheability" logic to a
+    // higher-level, config-based approach. E.g., individual tools can
+    // opt-in to aggressive caching by declaring themselves `pure` or
+    // `immutable` regardless of the HTTP method used to call the tool.
     const newReq = normalizeRequestHeaders(
       new Request(normalizedUrl, {
         headers: request.headers,
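Taken together, this hunk hashes the request body and folds the digest into the cache URL, so a POST or PUT can be cached as if it were a GET. A minimal sketch of that flow, not taken from the commit (the URL and payload below are invented, and it assumes the sha256 helper updated later in this diff):

const req = new Request('https://gateway.example.com/tools/search', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ query: 'weather in SF' })
})

// Hash the cloned body and tuck the digest into a query param, so the
// cache key looks like an ordinary GET URL regardless of the HTTP method.
const bodyHash = await sha256(await req.clone().text())
const cacheUrl = new URL(req.url)
cacheUrl.searchParams.set('x-agentic-cache-key', bodyHash)
// e.g. https://gateway.example.com/tools/search?x-agentic-cache-key=<hex digest>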
@@ -94,7 +92,7 @@ export async function getRequestCacheKey(
   }
 }

-const requestHeaderWhitelist = new Set(['cache-control'])
+const requestHeaderWhitelist = new Set(['cache-control', 'mcp-session-id'])

 function normalizeRequestHeaders(request: Request) {
   const headers = Object.fromEntries(request.headers.entries())
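The whitelist gains 'mcp-session-id', so the MCP session id header now survives header normalization. The body of normalizeRequestHeaders is only partially visible in this hunk; what follows is a hedged sketch of how a whitelist like this is typically applied, with filterToWhitelist being a hypothetical name rather than code from the commit:

// Hypothetical helper showing whitelist-style header filtering.
// Fetch API header names are already lower-cased, matching the Set entries.
function filterToWhitelist(request: Request, whitelist: Set<string>): Headers {
  const filtered = new Headers()

  for (const [key, value] of request.headers.entries()) {
    if (whitelist.has(key)) {
      filtered.set(key, value)
    }
  }

  return filtered
}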
@@ -104,9 +104,18 @@ export function parseZodSchema<TSchema extends ZodType<any, any, any>>(
 // return createHash('sha256').update(input).digest('hex')
 // }

-export async function sha256(input: string = crypto.randomUUID()) {
-  const textBuffer = new TextEncoder().encode(input)
-  const hashBuffer = await crypto.subtle.digest('SHA-256', textBuffer)
+export async function sha256(
+  input: string | ArrayBuffer = crypto.randomUUID()
+) {
+  let dataBuffer: ArrayBuffer
+
+  if (typeof input === 'string') {
+    dataBuffer = new TextEncoder().encode(input).buffer
+  } else {
+    dataBuffer = input
+  }
+
+  const hashBuffer = await crypto.subtle.digest('SHA-256', dataBuffer)
   const hashArray = Array.from(new Uint8Array(hashBuffer))
   const hashHex = hashArray
     .map((b) => ('00' + b.toString(16)).slice(-2))
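The reworked helper now accepts either a string or a raw ArrayBuffer, which is what lets the else branch earlier in this commit hash non-JSON bodies directly. A small usage sketch, assuming the function returns the hex digest that hashHex is building (the hunk is truncated before the return):

// String input is encoded with TextEncoder before hashing.
const textHash = await sha256('{"query":"weather in SF"}')

// Binary input (an ArrayBuffer) is hashed as-is.
const binaryBody = new Uint8Array([0xde, 0xad, 0xbe, 0xef]).buffer
const binaryHash = await sha256(binaryBody)

console.log(textHash.length, binaryHash.length) // 64 hex characters each for SHA-256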