Mirrored from https://github.com/transitive-bullshit/chatgpt-api

feat: add s3/r2 storage support to backend api and api-client

parent bbbbb9a387
commit 62b8246cc3
@@ -9,6 +9,7 @@ DATABASE_URL=

 AGENTIC_WEB_BASE_URL=
 AGENTIC_GATEWAY_BASE_URL=
+AGENTIC_STORAGE_BASE_URL='https://storage.agentic.so'

 JWT_SECRET=

@@ -29,3 +30,11 @@ AGENTIC_ADMIN_API_KEY=
 # Used to simplify recreating the demo `@agentic/search` project during
 # development while we're frequently resetting the database
 AGENTIC_SEARCH_PROXY_SECRET=
+
+# s3 connection settings (compatible with cloudflare r2)
+S3_BUCKET='agentic'
+S3_REGION='auto'
+# example: "https://<id>.r2.cloudflarestorage.com"
+S3_ENDPOINT=
+S3_ACCESS_KEY_ID=
+S3_ACCESS_KEY_SECRET=
@@ -25,7 +25,8 @@
     "drizzle-kit:prod": "dotenvx run -o -f .env.production -- drizzle-kit",
     "clean": "del dist",
     "test": "run-s test:*",
-    "test:typecheck": "tsc --noEmit"
+    "test:typecheck": "tsc --noEmit",
+    "test:unit": "dotenvx run -- vitest run"
   },
   "dependencies": {
     "@agentic/platform": "workspace:*",

@@ -34,6 +35,8 @@
     "@agentic/platform-hono": "workspace:*",
     "@agentic/platform-types": "workspace:*",
     "@agentic/platform-validators": "workspace:*",
+    "@aws-sdk/client-s3": "^3.840.0",
+    "@aws-sdk/s3-request-presigner": "^3.840.0",
     "@dicebear/collection": "catalog:",
     "@dicebear/core": "catalog:",
     "@fisch0920/drizzle-orm": "catalog:",
@@ -0,0 +1,39 @@
+<p align="center">
+  <a href="https://agentic.so">
+    <img alt="Agentic" src="https://raw.githubusercontent.com/transitive-bullshit/agentic/main/apps/web/public/agentic-social-image-light.jpg" width="640">
+  </a>
+</p>
+
+<p>
+  <a href="https://github.com/transitive-bullshit/agentic/actions/workflows/main.yml"><img alt="Build Status" src="https://github.com/transitive-bullshit/agentic/actions/workflows/main.yml/badge.svg" /></a>
+  <a href="https://prettier.io"><img alt="Prettier Code Formatting" src="https://img.shields.io/badge/code_style-prettier-brightgreen.svg" /></a>
+</p>
+
+# Agentic API <!-- omit from toc -->
+
+> Backend API for the Agentic platform.
+
+- [Website](https://agentic.so)
+- [Docs](https://docs.agentic.so)
+
+## Dependencies
+
+- **Postgres**
+  - `DATABASE_URL` - Postgres connection string
+  - [On macOS](https://wiki.postgresql.org/wiki/Homebrew): `brew install postgresql && brew services start postgresql`
+  - You'll need to run `pnpm drizzle-kit push` to set up your database schema
+- **S3** - Required to use file attachments
+  - Any S3-compatible provider is supported, such as [Cloudflare R2](https://developers.cloudflare.com/r2/)
+  - Alternatively, you can use a local S3 server like [MinIO](https://github.com/minio/minio#homebrew-recommended) or [LocalStack](https://github.com/localstack/localstack)
+    - To run LocalStack on macOS: `brew install localstack/tap/localstack-cli && localstack start -d`
+    - To run MinIO on macOS: `brew install minio/stable/minio && minio server /data`
+  - That said, I recommend Cloudflare R2; it's excellent and should be free for most use cases!
+  - `S3_BUCKET` - Required
+  - `S3_REGION` - Optional; defaults to `auto`
+  - `S3_ENDPOINT` - Required; example: `https://<id>.r2.cloudflarestorage.com`
+  - `S3_ACCESS_KEY_ID` - Required ([Cloudflare R2 docs](https://developers.cloudflare.com/r2/api/s3/tokens/))
+  - `S3_ACCESS_KEY_SECRET` - Required ([Cloudflare R2 docs](https://developers.cloudflare.com/r2/api/s3/tokens/))
+
+## License
+
+[GNU AGPL 3.0](https://choosealicense.com/licenses/agpl-3.0/)
@@ -48,7 +48,8 @@ export function registerV1GitHubOAuthInitFlow(

   const state = crypto.randomUUID()

-  // TODO: unique identifier
+  // TODO: unique identifier!
+  // TODO: THIS IS IMPORTANT!! if multiple users are authenticating with github concurrently, this will currently really mess things up...
   await authStorage.set(['github', state, 'redirectUri'], { redirectUri })

   const publicRedirectUri = `${env.apiBaseUrl}/v1/auth/github/callback`
@@ -1,3 +1,3 @@
-import { DrizzleAuthStorage } from '@/lib/drizzle-auth-storage'
+import { DrizzleAuthStorage } from '@/lib/auth/drizzle-auth-storage'

 export const authStorage = DrizzleAuthStorage()
@@ -39,6 +39,7 @@ import { registerV1GetPublicProjectByIdentifier } from './projects/get-public-pr
 import { registerV1ListProjects } from './projects/list-projects'
 import { registerV1ListPublicProjects } from './projects/list-public-projects'
 import { registerV1UpdateProject } from './projects/update-project'
+import { registerV1GetSignedStorageUploadUrl } from './storage/get-signed-storage-upload-url'
 import { registerV1CreateTeam } from './teams/create-team'
 import { registerV1DeleteTeam } from './teams/delete-team'
 import { registerV1GetTeam } from './teams/get-team'

@@ -95,6 +96,9 @@ registerV1CreateTeamMember(privateRouter)
 registerV1UpdateTeamMember(privateRouter)
 registerV1DeleteTeamMember(privateRouter)

+// Storage
+registerV1GetSignedStorageUploadUrl(privateRouter)
+
 // Public projects
 registerV1ListPublicProjects(publicRouter)
 registerV1GetPublicProjectByIdentifier(publicRouter) // must be before `registerV1GetPublicProject`
@@ -0,0 +1,86 @@
+import { assert } from '@agentic/platform-core'
+import { createRoute, type OpenAPIHono, z } from '@hono/zod-openapi'
+
+import type { AuthenticatedHonoEnv } from '@/lib/types'
+import { db, eq, projectIdentifierSchema, schema } from '@/db'
+import { acl } from '@/lib/acl'
+import {
+  openapiAuthenticatedSecuritySchemas,
+  openapiErrorResponse404,
+  openapiErrorResponses
+} from '@/lib/openapi-utils'
+import {
+  getStorageObjectPublicUrl,
+  getStorageSignedUploadUrl
+} from '@/lib/storage'
+
+export const getSignedUploadUrlQuerySchema = z.object({
+  projectIdentifier: projectIdentifierSchema,
+
+  /**
+   * Should be a hash of the contents of the file to upload with the correct
+   * file extension.
+   *
+   * @example `9f86d081884c7d659a2feaa0c55ad015a.png`
+   */
+  key: z
+    .string()
+    .nonempty()
+    .describe(
+      'Should be a hash of the contents of the file to upload with the correct file extension (eg, "9f86d081884c7d659a2feaa0c55ad015a.png").'
+    )
+})
+
+const route = createRoute({
+  description:
+    "Gets a signed URL for uploading a file to Agentic's blob storage. Files are namespaced to a given project and are identified by a key that should be a hash of the file's contents, with the correct file extension.",
+  tags: ['storage'],
+  operationId: 'getSignedStorageUploadUrl',
+  method: 'get',
+  path: 'storage/signed-upload-url',
+  security: openapiAuthenticatedSecuritySchemas,
+  request: {
+    query: getSignedUploadUrlQuerySchema
+  },
+  responses: {
+    200: {
+      description: 'A signed upload URL',
+      content: {
+        'application/json': {
+          schema: z.object({
+            signedUploadUrl: z
+              .string()
+              .url()
+              .describe('The signed upload URL.'),
+            publicObjectUrl: z
+              .string()
+              .url()
+              .describe('The public URL the object will have once uploaded.')
+          })
+        }
+      }
+    },
+    ...openapiErrorResponses,
+    ...openapiErrorResponse404
+  }
+})
+
+export function registerV1GetSignedStorageUploadUrl(
+  app: OpenAPIHono<AuthenticatedHonoEnv>
+) {
+  return app.openapi(route, async (c) => {
+    const { projectIdentifier, key } = c.req.valid('query')
+
+    const project = await db.query.projects.findFirst({
+      where: eq(schema.projects.identifier, projectIdentifier)
+    })
+    assert(project, 404, `Project not found "${projectIdentifier}"`)
+    await acl(c, project, { label: 'Project' })
+
+    const compoundKey = `${project.identifier}/${key}`
+    const signedUploadUrl = await getStorageSignedUploadUrl(compoundKey)
+    const publicObjectUrl = getStorageObjectPublicUrl(compoundKey)
+
+    return c.json({ signedUploadUrl, publicObjectUrl })
+  })
+}
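End to end, a consumer calls this route to get a presigned URL and then uploads the bytes directly to storage. A minimal sketch (not part of the commit), assuming a bearer token and a deployed API base URL, both illustrative:

```ts
// Sketch: request a signed upload URL, then PUT the file bytes to it.
// `apiBaseUrl`, the token, and the project identifier are illustrative.
const apiBaseUrl = 'https://api.agentic.so'

async function uploadViaSignedUrl(
  bytes: ArrayBuffer,
  key: string,
  token: string
): Promise<string> {
  const params = new URLSearchParams({
    projectIdentifier: '@username/my-project',
    key
  })

  const res = await fetch(
    `${apiBaseUrl}/v1/storage/signed-upload-url?${params}`,
    { headers: { Authorization: `Bearer ${token}` } }
  )
  if (!res.ok) throw new Error(`Failed to get signed upload URL: ${res.status}`)
  const { signedUploadUrl, publicObjectUrl } = (await res.json()) as {
    signedUploadUrl: string
    publicObjectUrl: string
  }

  // The URL was presigned for a PutObject command, so the upload must use PUT.
  const upload = await fetch(signedUploadUrl, { method: 'PUT', body: bytes })
  if (!upload.ok) throw new Error(`Upload failed: ${upload.status}`)

  return publicObjectUrl
}
```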
@@ -0,0 +1,46 @@
+export interface AuthStorageAdapter {
+  get(key: string[]): Promise<Record<string, any> | undefined>
+  remove(key: string[]): Promise<void>
+  set(key: string[], value: any, expiry?: Date): Promise<void>
+  scan(prefix: string[]): AsyncIterable<[string[], any]>
+}
+
+const SEPERATOR = String.fromCodePoint(0x1f)
+
+export function joinKey(key: string[]) {
+  return key.join(SEPERATOR)
+}
+
+export function splitKey(key: string) {
+  return key.split(SEPERATOR)
+}
+
+export namespace AuthStorage {
+  function encode(key: string[]) {
+    return key.map((k) => k.replaceAll(SEPERATOR, ''))
+  }
+
+  export function get<T>(adapter: AuthStorageAdapter, key: string[]) {
+    return adapter.get(encode(key)) as Promise<T | null>
+  }
+
+  export function set(
+    adapter: AuthStorageAdapter,
+    key: string[],
+    value: any,
+    ttl?: number
+  ) {
+    const expiry = ttl ? new Date(Date.now() + ttl * 1000) : undefined
+    return adapter.set(encode(key), value, expiry)
+  }
+
+  export function remove(adapter: AuthStorageAdapter, key: string[]) {
+    return adapter.remove(encode(key))
+  }
+
+  export function scan<T>(
+    adapter: AuthStorageAdapter,
+    key: string[]
+  ): AsyncIterable<[string[], T]> {
+    return adapter.scan(encode(key))
+  }
+}
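The `AuthStorage` helpers are adapter-agnostic: `encode` strips the 0x1f separator out of each key segment so joined keys stay unambiguous, and `set` turns a TTL in seconds into an absolute expiry `Date` for the adapter. A sketch with a throwaway in-memory adapter (not part of the commit) shows the intended call pattern:

```ts
import {
  AuthStorage,
  type AuthStorageAdapter,
  joinKey,
  splitKey
} from './auth-storage'

// Sketch only: an in-memory adapter for illustration.
function MemoryAuthStorage(): AuthStorageAdapter {
  const store = new Map<string, { value: any; expiry?: Date }>()

  return {
    async get(key) {
      const entry = store.get(joinKey(key))
      if (!entry || (entry.expiry && entry.expiry < new Date())) {
        return undefined
      }
      return entry.value
    },
    async set(key, value, expiry) {
      store.set(joinKey(key), { value, expiry })
    },
    async remove(key) {
      store.delete(joinKey(key))
    },
    async *scan(prefix) {
      const joined = joinKey(prefix)
      for (const [k, entry] of store) {
        if (k.startsWith(joined)) yield [splitKey(k), entry.value]
      }
    }
  }
}

const storage = MemoryAuthStorage()

// Store an OAuth state entry for 10 minutes (TTL is in seconds).
await AuthStorage.set(
  storage,
  ['github', 'some-state', 'redirectUri'],
  { redirectUri: '/' },
  600
)
const entry = await AuthStorage.get<{ redirectUri: string }>(storage, [
  'github',
  'some-state',
  'redirectUri'
])
```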
@@ -1,7 +1,8 @@
 import { and, db, eq, gt, isNull, like, or, schema } from '@/db'
-import { joinKey, splitKey, type StorageAdapter } from '@/lib/storage'

-export function DrizzleAuthStorage(): StorageAdapter {
+import { type AuthStorageAdapter, joinKey, splitKey } from './auth-storage'
+
+export function DrizzleAuthStorage(): AuthStorageAdapter {
   return {
     async get(key: string[]) {
       const id = joinKey(key)
@@ -12,6 +12,11 @@ export const envSchema = baseEnvSchema

   AGENTIC_WEB_BASE_URL: z.string().url(),
   AGENTIC_GATEWAY_BASE_URL: z.string().url(),
+  AGENTIC_STORAGE_BASE_URL: z
+    .string()
+    .url()
+    .optional()
+    .default('https://storage.agentic.so'),

   JWT_SECRET: z.string().nonempty(),

@@ -30,7 +35,13 @@ export const envSchema = baseEnvSchema

   // Used to simplify recreating the demo `@agentic/search` project during
   // development while we're frequently resetting the database
-  AGENTIC_SEARCH_PROXY_SECRET: z.string().nonempty()
+  AGENTIC_SEARCH_PROXY_SECRET: z.string().nonempty(),
+
+  S3_BUCKET: z.string().nonempty().optional().default('agentic'),
+  S3_REGION: z.string().nonempty().optional().default('auto'),
+  S3_ENDPOINT: z.string().nonempty().url(),
+  S3_ACCESS_KEY_ID: z.string().nonempty(),
+  S3_ACCESS_KEY_SECRET: z.string().nonempty()
 })
   .strip()

 export type RawEnv = z.infer<typeof envSchema>
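With these additions, only `S3_ENDPOINT`, `S3_ACCESS_KEY_ID`, and `S3_ACCESS_KEY_SECRET` must be provided; `S3_BUCKET` and `S3_REGION` fall back to `agentic` and `auto`. A sketch of that defaulting behavior in isolation (a standalone schema mirroring just the S3 fields, since the full `envSchema` also requires the platform's other variables; the values are placeholders):

```ts
import { z } from 'zod'

// Mirrors only the S3 fields of envSchema, for illustration.
const s3EnvSchema = z.object({
  S3_BUCKET: z.string().nonempty().optional().default('agentic'),
  S3_REGION: z.string().nonempty().optional().default('auto'),
  S3_ENDPOINT: z.string().nonempty().url(),
  S3_ACCESS_KEY_ID: z.string().nonempty(),
  S3_ACCESS_KEY_SECRET: z.string().nonempty()
})

const parsed = s3EnvSchema.parse({
  S3_ENDPOINT: 'https://example.r2.cloudflarestorage.com',
  S3_ACCESS_KEY_ID: 'example-key-id',
  S3_ACCESS_KEY_SECRET: 'example-secret'
})
// parsed.S3_BUCKET === 'agentic' and parsed.S3_REGION === 'auto'
```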
@@ -0,0 +1,30 @@
+import { describe, expect, it } from 'vitest'
+
+import {
+  deleteStorageObject,
+  getStorageObject,
+  putStorageObject
+} from './storage'
+
+describe('Storage', () => {
+  it('putObject, getObject, deleteObject', async () => {
+    if (!process.env.S3_ACCESS_KEY_ID) {
+      // TODO: ignore on CI
+      expect(true).toEqual(true)
+      return
+    }
+
+    await putStorageObject('test.txt', 'hello world', {
+      ContentType: 'text/plain'
+    })
+
+    const obj = await getStorageObject('test.txt')
+    expect(obj.ContentType).toEqual('text/plain')
+
+    const body = await obj.Body?.transformToString()
+    expect(body).toEqual('hello world')
+
+    const res = await deleteStorageObject('test.txt')
+    expect(res.$metadata.httpStatusCode).toEqual(204)
+  })
+})
@@ -1,46 +1,91 @@
-export interface StorageAdapter {
-  get(key: string[]): Promise<Record<string, any> | undefined>
-  remove(key: string[]): Promise<void>
-  set(key: string[], value: any, expiry?: Date): Promise<void>
-  scan(prefix: string[]): AsyncIterable<[string[], any]>
-}
-
-const SEPERATOR = String.fromCodePoint(0x1f)
-
-export function joinKey(key: string[]) {
-  return key.join(SEPERATOR)
-}
-
-export function splitKey(key: string) {
-  return key.split(SEPERATOR)
-}
-
-export namespace Storage {
-  function encode(key: string[]) {
-    return key.map((k) => k.replaceAll(SEPERATOR, ''))
-  }
-
-  export function get<T>(adapter: StorageAdapter, key: string[]) {
-    return adapter.get(encode(key)) as Promise<T | null>
-  }
-
-  export function set(
-    adapter: StorageAdapter,
-    key: string[],
-    value: any,
-    ttl?: number
-  ) {
-    const expiry = ttl ? new Date(Date.now() + ttl * 1000) : undefined
-    return adapter.set(encode(key), value, expiry)
-  }
-
-  export function remove(adapter: StorageAdapter, key: string[]) {
-    return adapter.remove(encode(key))
-  }
-
-  export function scan<T>(
-    adapter: StorageAdapter,
-    key: string[]
-  ): AsyncIterable<[string[], T]> {
-    return adapter.scan(encode(key))
-  }
-}
+import {
+  DeleteObjectCommand,
+  type DeleteObjectCommandInput,
+  GetObjectCommand,
+  type GetObjectCommandInput,
+  PutObjectCommand,
+  type PutObjectCommandInput,
+  S3Client
+} from '@aws-sdk/client-s3'
+import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
+
+import { env } from './env'
+
+// This storage client is designed to work with any S3-compatible storage provider.
+// For Cloudflare R2, see https://developers.cloudflare.com/r2/examples/aws/aws-sdk-js-v3/
+
+const Bucket = env.S3_BUCKET
+
+export const storageClient = new S3Client({
+  region: env.S3_REGION,
+  endpoint: env.S3_ENDPOINT,
+  credentials: {
+    accessKeyId: env.S3_ACCESS_KEY_ID,
+    secretAccessKey: env.S3_ACCESS_KEY_SECRET
+  }
+})
+
+// This ensures that buckets are created automatically if they don't exist on
+// Cloudflare R2. It won't affect other providers.
+// @see https://developers.cloudflare.com/r2/examples/aws/custom-header/
+storageClient.middlewareStack.add(
+  (next, _) => async (args) => {
+    const r = args.request as RequestInit
+    r.headers = {
+      'cf-create-bucket-if-missing': 'true',
+      ...r.headers
+    }
+
+    return next(args)
+  },
+  { step: 'build', name: 'customHeaders' }
+)
+
+export async function getStorageObject(
+  key: string,
+  opts?: Omit<GetObjectCommandInput, 'Bucket' | 'Key'>
+) {
+  return storageClient.send(new GetObjectCommand({ Bucket, Key: key, ...opts }))
+}
+
+export async function putStorageObject(
+  key: string,
+  value: PutObjectCommandInput['Body'],
+  opts?: Omit<PutObjectCommandInput, 'Bucket' | 'Key' | 'Body'>
+) {
+  return storageClient.send(
+    new PutObjectCommand({ Bucket, Key: key, Body: value, ...opts })
+  )
+}
+
+export async function deleteStorageObject(
+  key: string,
+  opts?: Omit<DeleteObjectCommandInput, 'Bucket' | 'Key'>
+) {
+  return storageClient.send(
+    new DeleteObjectCommand({ Bucket, Key: key, ...opts })
+  )
+}
+
+export function getStorageObjectInternalUrl(key: string) {
+  return `${env.AGENTIC_STORAGE_BASE_URL}/${Bucket}/${key}`
+}
+
+export function getStorageObjectPublicUrl(key: string) {
+  return `${env.AGENTIC_STORAGE_BASE_URL}/${key}`
+}
+
+export async function getStorageSignedUploadUrl(
+  key: string,
+  {
+    expiresIn = 5 * 60 // 5 minutes (getSignedUrl expects seconds)
+  }: {
+    expiresIn?: number
+  } = {}
+) {
+  return getSignedUrl(
+    storageClient,
+    new PutObjectCommand({ Bucket, Key: key }),
+    { expiresIn }
+  )
+}
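Since `Bucket` is pinned from the environment, call sites only deal in object keys. A short usage sketch (key and contents illustrative; `expiresIn` is in seconds):

```ts
import {
  getStorageObjectPublicUrl,
  getStorageSignedUploadUrl,
  putStorageObject
} from './storage'

// Direct server-side write:
await putStorageObject('example/hello.txt', 'hello world', {
  ContentType: 'text/plain'
})

// Or hand out a short-lived presigned PUT URL instead:
const signedUrl = await getStorageSignedUploadUrl('example/hello.txt', {
  expiresIn: 60
})

// Once uploaded, the object is publicly readable at:
const publicUrl = getStorageObjectPublicUrl('example/hello.txt')
```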
@@ -1,145 +0,0 @@
-import { issuer } from '@agentic/openauth'
-import { GithubProvider } from '@agentic/openauth/provider/github'
-import { PasswordProvider } from '@agentic/openauth/provider/password'
-import { assert, pick } from '@agentic/platform-core'
-import { isValidPassword } from '@agentic/platform-validators'
-
-import { type RawUser } from '@/db'
-import { subjects } from '@/lib/auth/subjects'
-import { upsertOrLinkUserAccount } from '@/lib/auth/upsert-or-link-user-account'
-import { DrizzleAuthStorage } from '@/lib/drizzle-auth-storage'
-import { env } from '@/lib/env'
-import { getGitHubClient } from '@/lib/external/github'
-
-import { resend } from './lib/external/resend'
-
-// Initialize OpenAuth issuer which is a Hono app for all auth routes.
-// TODO: fix this type...
-export const authRouter: any = issuer({
-  subjects,
-  storage: DrizzleAuthStorage(),
-  ttl: {
-    access: 60 * 60 * 24 * 30, // 30 days
-    refresh: 60 * 60 * 24 * 365 // 1 year
-    // Used for creating longer-lived tokens for testing
-    // access: 60 * 60 * 24 * 366, // 1 year
-    // refresh: 60 * 60 * 24 * 365 * 5 // 5 years
-  },
-  providers: {
-    github: GithubProvider({
-      clientID: env.GITHUB_CLIENT_ID,
-      clientSecret: env.GITHUB_CLIENT_SECRET,
-      scopes: ['user:email']
-    }),
-    password: PasswordProvider({
-      loginUrl: async () => `${env.WEB_AUTH_BASE_URL}/login`,
-      registerUrl: async () => `${env.WEB_AUTH_BASE_URL}/signup`,
-      changeUrl: async () => `${env.WEB_AUTH_BASE_URL}/forgot-password`,
-      sendCode: async (email, code) => {
-        // eslint-disable-next-line no-console
-        console.log('sending verify code email', { email, code })
-
-        await resend.sendVerifyCodeEmail({ code, to: email })
-      },
-      validatePassword: (password) => {
-        if (password.length < 3) {
-          return 'Password must be at least 3 characters'
-        }
-
-        if (password.length > 1024) {
-          return 'Password must be less than 1024 characters'
-        }
-
-        if (!isValidPassword(password)) {
-          return 'Invalid password'
-        }
-
-        return undefined
-      }
-    })
-  },
-  success: async (ctx, value) => {
-    const { provider } = value
-    let user: RawUser | undefined
-
-    // eslint-disable-next-line no-console
-    console.log('Auth success', provider, JSON.stringify(value, null, 2))
-
-    function getPartialOAuthAccount() {
-      assert(provider === 'github', `Unsupported OAuth provider "${provider}"`)
-      const now = Date.now()
-
-      return {
-        provider,
-        accessToken: value.tokenset.access,
-        refreshToken: value.tokenset.refresh,
-        // `expires_in` and `refresh_token_expires_in` are given in seconds
-        accessTokenExpiresAt: new Date(
-          now + value.tokenset.raw.expires_in * 1000
-        ),
-        refreshTokenExpiresAt: new Date(
-          now + value.tokenset.raw.refresh_token_expires_in * 1000
-        ),
-        scope: (value.tokenset.raw.scope as string) || undefined
-      }
-    }
-
-    if (provider === 'github') {
-      const client = getGitHubClient({ accessToken: value.tokenset.access })
-      const { data: ghUser } = await client.rest.users.getAuthenticated()
-
-      if (!ghUser.email) {
-        const { data: emails } = await client.request('GET /user/emails')
-        const primary = emails.find((e) => e.primary)
-        const verified = emails.find((e) => e.verified)
-        const fallback = emails.find((e) => e.email)
-        const email = primary?.email || verified?.email || fallback?.email
-        ghUser.email = email!
-      }
-
-      assert(
-        ghUser.email,
-        'Error authenticating with GitHub: user email is required.'
-      )
-
-      user = await upsertOrLinkUserAccount({
-        partialAccount: {
-          accountId: `${ghUser.id}`,
-          accountUsername: ghUser.login.toLowerCase(),
-          ...getPartialOAuthAccount()
-        },
-        partialUser: {
-          email: ghUser.email,
-          isEmailVerified: true,
-          name: ghUser.name || undefined,
-          username: ghUser.login.toLowerCase(),
-          image: ghUser.avatar_url
-        }
-      })
-    } else if (provider === 'password') {
-      user = await upsertOrLinkUserAccount({
-        partialAccount: {
-          provider,
-          accountId: value.email
-        },
-        partialUser: {
-          email: value.email,
-          isEmailVerified: true
-        }
-      })
-    } else {
-      assert(
-        user,
-        400,
-        `Authentication error: unsupported auth provider "${provider}"`
-      )
-    }
-
-    assert(
-      user,
-      500,
-      `Authentication error for auth provider "${provider}": Unexpected error initializing user`
-    )
-
-    return ctx.subject('user', pick(user, 'id', 'username'))
-  }
-})
@@ -29,6 +29,7 @@
     "@agentic/platform-core": "workspace:*",
     "@agentic/platform-types": "workspace:*",
     "@standard-schema/spec": "catalog:",
+    "file-type": "^21.0.0",
    "ky": "catalog:",
    "type-fest": "catalog:"
  },
@@ -11,7 +11,8 @@ import type {
   User
 } from '@agentic/platform-types'
 import type { Simplify } from 'type-fest'
-import { assert, sanitizeSearchParams } from '@agentic/platform-core'
+import { assert, sanitizeSearchParams, sha256 } from '@agentic/platform-core'
+import { fileTypeFromBuffer } from 'file-type'
 import defaultKy, { type KyInstance } from 'ky'

 import type { OnUpdateAuthSessionFunction } from './types'

@@ -336,6 +337,103 @@ export class AgenticApiClient {
       .json()
   }

+  /**
+   * Gets a signed URL for uploading a file to Agentic's blob storage.
+   *
+   * Files are namespaced to a given project and are identified by a key that
+   * should be a hash of the file's contents, with the correct file extension.
+   *
+   * @example
+   * ```ts
+   * const { signedUploadUrl, publicObjectUrl } = await client.getSignedStorageUploadUrl({
+   *   projectIdentifier: '@username/my-project',
+   *   key: '9f86d081884c7d659a2feaa0c55ad015a.png'
+   * })
+   * ```
+   */
+  async getSignedStorageUploadUrl(
+    searchParams: OperationParameters<'getSignedStorageUploadUrl'>
+  ): Promise<{
+    /** The signed upload URL. */
+    signedUploadUrl: string
+
+    /** The public URL the object will have once uploaded. */
+    publicObjectUrl: string
+  }> {
+    return this.ky
+      .get(`v1/storage/signed-upload-url`, {
+        searchParams: sanitizeSearchParams(searchParams)
+      })
+      .json()
+  }
+
+  /**
+   * Uploads a file to Agentic's blob storage for a given project.
+   *
+   * @example
+   * ```ts
+   * const publicObjectUrl = await client.uploadFileToStorage(
+   *   new URL('https://example.com/image.png'),
+   *   { projectIdentifier: '@username/my-project' }
+   * )
+   * ```
+   */
+  async uploadFileToStorage(
+    source: string | ArrayBuffer | URL,
+    {
+      projectIdentifier
+    }: {
+      projectIdentifier: string
+    }
+  ): Promise<string> {
+    let sourceBuffer: ArrayBuffer
+
+    if (typeof source === 'string') {
+      try {
+        source = new URL(source)
+      } catch {
+        // Not a URL
+        throw new Error(`Invalid source file URL: ${source}`)
+      }
+    }
+
+    if (source instanceof URL) {
+      sourceBuffer = await defaultKy.get(source).arrayBuffer()
+    } else if (source instanceof ArrayBuffer) {
+      sourceBuffer = source
+    } else {
+      throw new Error(`Invalid source file: ${source}`)
+    }
+
+    const [hash, fileType] = await Promise.all([
+      sha256(sourceBuffer),
+      fileTypeFromBuffer(sourceBuffer)
+    ])
+
+    const key = fileType ? `${hash}.${fileType.ext}` : hash
+
+    const { signedUploadUrl, publicObjectUrl } =
+      await this.getSignedStorageUploadUrl({
+        projectIdentifier,
+        key
+      })
+
+    try {
+      // Check if the object already exists.
+      await defaultKy.head(publicObjectUrl)
+    } catch {
+      // Object doesn't exist yet, so upload it with PUT (the URL was
+      // presigned for a PutObject command).
+      await defaultKy.put(signedUploadUrl, {
+        body: sourceBuffer,
+        headers: {
+          'Content-Type': fileType?.mime ?? 'application/octet-stream'
+        }
+      })
+    }
+
+    return publicObjectUrl
+  }
+
   /** Lists projects that have been published publicly to the marketplace. */
   async listPublicProjects<
     TPopulate extends NonNullable<
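`uploadFileToStorage` accepts a URL, a URL string, or raw bytes, and skips the upload when an object with the same content hash already exists. A usage sketch from Node (assumes `client` is an already-authenticated `AgenticApiClient` instance; construction details omitted):

```ts
import { readFile } from 'node:fs/promises'

// Assumes `client` is an authenticated AgenticApiClient instance.
const file = await readFile('./logo.png')

// Copy the Buffer into a standalone ArrayBuffer, since Node Buffers may be
// views into a larger shared pool.
const arrayBuffer = file.buffer.slice(
  file.byteOffset,
  file.byteOffset + file.byteLength
) as ArrayBuffer

const publicObjectUrl = await client.uploadFileToStorage(arrayBuffer, {
  projectIdentifier: '@username/my-project'
})
// e.g. https://storage.agentic.so/@username/my-project/<sha256-hash>.png
```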
@@ -247,6 +247,23 @@ export interface paths {
     patch?: never;
     trace?: never;
   };
+  "/v1/storage/signed-upload-url": {
+    parameters: {
+      query?: never;
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    /** @description Gets a signed URL for uploading a file to Agentic's blob storage. Files are namespaced to a given project and are identified by a key that should be a hash of the file's contents, with the correct file extension. */
+    get: operations["getSignedStorageUploadUrl"];
+    put?: never;
+    post?: never;
+    delete?: never;
+    options?: never;
+    head?: never;
+    patch?: never;
+    trace?: never;
+  };
   "/v1/projects": {
     parameters: {
       query?: never;

@@ -1811,6 +1828,46 @@ export interface operations {
       404: components["responses"]["404"];
     };
   };
+  getSignedStorageUploadUrl: {
+    parameters: {
+      query: {
+        /** @description Public project identifier (e.g. "@namespace/project-slug") */
+        projectIdentifier: components["schemas"]["ProjectIdentifier"];
+        /** @description Should be a hash of the contents of the file to upload with the correct file extension (eg, "9f86d081884c7d659a2feaa0c55ad015a.png"). */
+        key: string;
+      };
+      header?: never;
+      path?: never;
+      cookie?: never;
+    };
+    requestBody?: never;
+    responses: {
+      /** @description A signed upload URL */
+      200: {
+        headers: {
+          [name: string]: unknown;
+        };
+        content: {
+          "application/json": {
+            /**
+             * Format: uri
+             * @description The signed upload URL.
+             */
+            signedUploadUrl: string;
+            /**
+             * Format: uri
+             * @description The public URL the object will have once uploaded.
+             */
+            publicObjectUrl: string;
+          };
+        };
+      };
+      400: components["responses"]["400"];
+      401: components["responses"]["401"];
+      403: components["responses"]["403"];
+      404: components["responses"]["404"];
+    };
+  };
   listProjects: {
     parameters: {
       query?: {
pnpm-lock.yaml: 703 lines changed (diff too large to display)