kopia lustrzana https://github.com/transitive-bullshit/chatgpt-api
feat: WIP kittens
rodzic
b774480a15
commit
1a4e697f0f
|
@ -11,8 +11,8 @@ export async function fetchCache(
|
||||||
cacheKey?: string
|
cacheKey?: string
|
||||||
fetchResponse: () => Promise<Response>
|
fetchResponse: () => Promise<Response>
|
||||||
}
|
}
|
||||||
) {
|
): Promise<Response> {
|
||||||
let response
|
let response: Response | undefined
|
||||||
|
|
||||||
if (cacheKey) {
|
if (cacheKey) {
|
||||||
response = await cache.match(cacheKey)
|
response = await cache.match(cacheKey)
|
||||||
|
|
|
@ -0,0 +1,100 @@
|
||||||
|
import contentType from 'content-type'
|
||||||
|
import stableJsonStringify from 'fast-json-stable-stringify'
|
||||||
|
|
||||||
|
import { normalizeUrl } from './normalize-url'
|
||||||
|
import * as sha256 from './sha256'
|
||||||
|
|
||||||
|
export async function getFaasRequestCacheKey(request) {
|
||||||
|
try {
|
||||||
|
const pragma = request.headers.get('pragma')
|
||||||
|
if (pragma === 'no-cache') {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const cacheControl = request.headers.get('cache-control')
|
||||||
|
if (cacheControl) {
|
||||||
|
const directives = new Set(cacheControl.split(',').map((s) => s.trim()))
|
||||||
|
if (directives.has('no-store') || directives.has('no-cache')) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (request.method === 'POST' || request.method === 'PUT') {
|
||||||
|
// useful for debugging since getting all the headers is awkward
|
||||||
|
// console.log(Object.fromEntries(request.headers.entries()))
|
||||||
|
|
||||||
|
const contentLength = parseInt(request.headers.get('content-length'))
|
||||||
|
|
||||||
|
// TODO: what is a reasonable upper bound for hashing the POST body size?
|
||||||
|
if (contentLength && contentLength < 10000) {
|
||||||
|
const ct = contentType.parse(
|
||||||
|
request.headers.get('content-type') || 'application/octet-stream'
|
||||||
|
)
|
||||||
|
const type = ct && ct.type
|
||||||
|
let hash
|
||||||
|
|
||||||
|
// TODO: gracefully handle content-encoding compression
|
||||||
|
// TODO: more robust content-type detection
|
||||||
|
|
||||||
|
if (type && type.indexOf('json') >= 0) {
|
||||||
|
const bodyJson = await request.clone().json()
|
||||||
|
const bodyString = stableJsonStringify(bodyJson)
|
||||||
|
hash = await sha256.fromString(bodyString)
|
||||||
|
} else if (type && type.indexOf('text/') >= 0) {
|
||||||
|
const bodyString = await request.clone().text()
|
||||||
|
hash = await sha256.fromString(bodyString)
|
||||||
|
} else {
|
||||||
|
const bodyBuffer = await request.clone().arrayBuffer()
|
||||||
|
hash = await sha256.fromBuffer(bodyBuffer)
|
||||||
|
}
|
||||||
|
|
||||||
|
const cacheUrl = new URL(request.url)
|
||||||
|
cacheUrl.pathname = cacheUrl.pathname + '/' + hash
|
||||||
|
|
||||||
|
const normalizedUrl = normalizeUrl(cacheUrl.toString())
|
||||||
|
|
||||||
|
const newReq = normalizeRequestHeaders(
|
||||||
|
new Request(normalizedUrl, {
|
||||||
|
headers: request.headers,
|
||||||
|
method: 'GET'
|
||||||
|
})
|
||||||
|
)
|
||||||
|
|
||||||
|
return newReq
|
||||||
|
}
|
||||||
|
|
||||||
|
return null
|
||||||
|
} else if (request.method === 'GET' || request.method === 'HEAD') {
|
||||||
|
const url = request.url
|
||||||
|
const normalizedUrl = normalizeUrl(url)
|
||||||
|
|
||||||
|
if (url !== normalizedUrl) {
|
||||||
|
return normalizeRequestHeaders(
|
||||||
|
new Request(normalizedUrl, {
|
||||||
|
method: request.method
|
||||||
|
})
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return normalizeRequestHeaders(new Request(request))
|
||||||
|
} catch (err) {
|
||||||
|
console.error('error computing cache key', request.method, request.url, err)
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const requestHeaderWhitelist = new Set(['cache-control'])
|
||||||
|
|
||||||
|
function normalizeRequestHeaders(request) {
|
||||||
|
const headers = Object.fromEntries(request.headers.entries())
|
||||||
|
const keys = Object.keys(headers)
|
||||||
|
|
||||||
|
for (const key of keys) {
|
||||||
|
if (!requestHeaderWhitelist.has(key)) {
|
||||||
|
request.headers.delete(key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return request
|
||||||
|
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
|
||||||
|
|
||||||
|
exports[`sha256 1`] = `"9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"`;
|
|
@ -1,6 +1,6 @@
|
||||||
import { expect, test } from 'vitest'
|
import { expect, test } from 'vitest'
|
||||||
|
|
||||||
import { omit, pick } from './utils'
|
import { omit, pick, sha256 } from './utils'
|
||||||
|
|
||||||
test('pick', () => {
|
test('pick', () => {
|
||||||
expect(pick({ a: 1, b: 2, c: 3 }, 'a', 'c')).toEqual({ a: 1, c: 3 })
|
expect(pick({ a: 1, b: 2, c: 3 }, 'a', 'c')).toEqual({ a: 1, c: 3 })
|
||||||
|
@ -17,3 +17,24 @@ test('omit', () => {
|
||||||
})
|
})
|
||||||
expect(omit({ a: 1, b: 2, c: 3 }, 'foo', 'bar', 'c')).toEqual({ a: 1, b: 2 })
|
expect(omit({ a: 1, b: 2, c: 3 }, 'foo', 'bar', 'c')).toEqual({ a: 1, b: 2 })
|
||||||
})
|
})
|
||||||
|
|
||||||
|
// Verifies the Web Crypto sha256 helper: digests are 64-char hex strings,
// the default (random UUID) input yields a different digest per call, and
// explicit inputs are deterministic and snapshot-stable.
test('sha256', async () => {
  // Test default behavior (random UUID)
  const hash1 = await sha256()
  const hash2 = await sha256()
  expect(hash1).toHaveLength(64) // SHA-256 produces 64 character hex string
  expect(hash2).toHaveLength(64)
  expect(hash1).not.toBe(hash2) // Different UUIDs should produce different hashes

  // Explicit input: hashing is a pure function of the string
  const hash3 = await sha256('foo')
  const hash4 = await sha256('foo')
  expect(hash3).toBe(hash4) // Same input should produce the same hash

  // A near-miss input must not collide with any earlier digest
  const hash5 = await sha256('foo1')
  expect(hash1).not.toBe(hash5)
  expect(hash2).not.toBe(hash5)
  expect(hash3).not.toBe(hash5)
  expect(hash4).not.toBe(hash5)

  // Known-answer check pinned via snapshot
  expect(await sha256('test')).toMatchSnapshot()
})
|
||||||
|
|
|
@ -1,5 +1,3 @@
|
||||||
import { createHash, randomUUID } from 'node:crypto'
|
|
||||||
|
|
||||||
import type { ContentfulStatusCode } from 'hono/utils/http-status'
|
import type { ContentfulStatusCode } from 'hono/utils/http-status'
|
||||||
import type { ZodSchema, ZodTypeDef } from 'zod'
|
import type { ZodSchema, ZodTypeDef } from 'zod'
|
||||||
import hashObjectImpl, { type Options as HashObjectOptions } from 'hash-object'
|
import hashObjectImpl, { type Options as HashObjectOptions } from 'hash-object'
|
||||||
|
@ -99,8 +97,19 @@ export function parseZodSchema<
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sha256(input: string = randomUUID()) {
|
// import { createHash, randomUUID } from 'node:crypto'
|
||||||
return createHash('sha256').update(input).digest('hex')
|
// export function sha256Node(input: string = randomUUID()) {
|
||||||
|
// return createHash('sha256').update(input).digest('hex')
|
||||||
|
// }
|
||||||
|
|
||||||
|
export async function sha256(input: string = crypto.randomUUID()) {
|
||||||
|
const textBuffer = new TextEncoder().encode(input)
|
||||||
|
const hashBuffer = await crypto.subtle.digest('SHA-256', textBuffer)
|
||||||
|
const hashArray = Array.from(new Uint8Array(hashBuffer))
|
||||||
|
const hashHex = hashArray
|
||||||
|
.map((b) => ('00' + b.toString(16)).slice(-2))
|
||||||
|
.join('')
|
||||||
|
return hashHex
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -1,5 +1,8 @@
|
||||||
{
|
{
|
||||||
"extends": "@fisch0920/config/tsconfig-node",
|
"extends": "@fisch0920/config/tsconfig-node",
|
||||||
|
"compilerOptions": {
|
||||||
|
"lib": ["ES2022"]
|
||||||
|
},
|
||||||
"include": ["src", "*.config.ts"],
|
"include": ["src", "*.config.ts"],
|
||||||
"exclude": ["node_modules"]
|
"exclude": ["node_modules"]
|
||||||
}
|
}
|
||||||
|
|
|
@ -21,6 +21,7 @@ export function mergeJsonSchemaObjects(
|
||||||
label: string
|
label: string
|
||||||
}
|
}
|
||||||
) {
|
) {
|
||||||
|
// TODO: Support cookie parameters
|
||||||
assert(
|
assert(
|
||||||
source !== 'cookie',
|
source !== 'cookie',
|
||||||
'Cookie parameters for OpenAPI operations are not yet supported. If you need cookie parameter support, please contact support@agentic.so.'
|
'Cookie parameters for OpenAPI operations are not yet supported. If you need cookie parameter support, please contact support@agentic.so.'
|
||||||
|
|
Ładowanie…
Reference in New Issue