Mirror of https://github.com/transitive-bullshit/chatgpt-api
pull/715/head
Parent: f2c4d2442e
Commit: f539c7ed38
@@ -6,16 +6,11 @@
# ------------------------------------------------------------------------------

DATABASE_URL=

JWT_SECRET=

SENTRY_DSN=

GCP_PROJECT_ID=
GCP_LOG_NAME='local-dev'
METADATA_SERVER_DETECTION='none'

STRIPE_PUBLISHABLE_KEY=
STRIPE_SECRET_KEY=

WORKOS_CLIENT_ID=
WORKOS_API_KEY=
WORKOS_SESSION_SECRET=
STRIPE_WEBHOOK_SECRET=

@@ -39,7 +39,6 @@
    "@agentic/validators": "workspace:*",
    "@fisch0920/drizzle-orm": "^0.43.7",
    "@fisch0920/drizzle-zod": "^0.7.9",
    "@google-cloud/logging": "^11.2.0",
    "@hono/node-server": "^1.14.1",
    "@hono/sentry": "^1.2.1",
    "@hono/zod-openapi": "^0.19.6",

@@ -52,8 +51,6 @@
    "hono": "^4.7.9",
    "jsonwebtoken": "^9.0.2",
    "p-all": "^5.0.0",
    "pino": "^9.6.0",
    "pino-abstract-transport": "^2.0.0",
    "postgres": "^3.4.5",
    "restore-cursor": "catalog:",
    "stripe": "^18.1.0",

@@ -131,20 +131,21 @@ export function registerV1ConsumersUpsertConsumer(
    // consumer._stripeAccount = project._stripeAccount
    await upsertStripeConnectCustomer({ stripeCustomer, consumer, project })

    console.log('SUBSCRIPTION', existing ? 'UPDATE' : 'CREATE', {
    const logger = c.get('logger')
    logger.info('SUBSCRIPTION', existing ? 'UPDATE' : 'CREATE', {
      project,
      deployment,
      consumer
    })

    const { subscription, consumer: updatedConsumer } =
      await upsertStripeSubscription({
      await upsertStripeSubscription(c, {
        consumer,
        user,
        project,
        deployment
      })
    console.log({ subscription })
    logger.info('subscription', subscription)

    return c.json(parseZodSchema(schema.consumerSelectSchema, updatedConsumer))
  })

@@ -3,7 +3,7 @@ import { createRoute, type OpenAPIHono } from '@hono/zod-openapi'
import type { AuthenticatedEnv } from '@/lib/types'
import { db, schema } from '@/db'
import { aclTeamMember } from '@/lib/acl-team-member'
import { getProviderToken } from '@/lib/auth/get-provider-token'
import { createProviderToken } from '@/lib/auth/create-provider-token'
import { ensureAuthUser } from '@/lib/ensure-auth-user'
import {
  openapiAuthenticatedSecuritySchemas,

@@ -64,7 +64,7 @@ export function registerV1ProjectsCreateProject(
        userId: user.id,
        id,
        _secret: sha256(),
        _providerToken: getProviderToken({ id })
        _providerToken: createProviderToken({ id })
      })
      .returning()
    assert(project, 500, `Failed to create project "${body.name}"`)

@@ -2,7 +2,7 @@ import jwt from 'jsonwebtoken'

import { env } from '@/lib/env'

export function getProviderToken(project: { id: string }) {
export function createProviderToken(project: { id: string }) {
  // TODO: Possibly in the future store stripe account ID as well and require
  // provider tokens to refresh after account changes?
  return jwt.sign({ projectId: project.id }, env.JWT_SECRET)

@@ -13,20 +13,26 @@ import {
import { stripe } from '@/lib/stripe'
import { assert } from '@/lib/utils'

export async function upsertStripeSubscription({
  consumer,
  user,
  deployment,
  project
}: {
  consumer: RawConsumer
  user: RawUser
  deployment: RawDeployment
  project: RawProject
}): Promise<{
import type { AuthenticatedContext } from '../types'

export async function upsertStripeSubscription(
  ctx: AuthenticatedContext,
  {
    consumer,
    user,
    deployment,
    project
  }: {
    consumer: RawConsumer
    user: RawUser
    deployment: RawDeployment
    project: RawProject
  }
): Promise<{
  subscription: Stripe.Subscription
  consumer: RawConsumer
}> {
  const logger = ctx.get('logger')
  const stripeConnectParams = project._stripeAccountId
    ? [
        {

@@ -61,9 +67,9 @@ export async function upsertStripeSubscription({
    ...stripeConnectParams
  )
  const existingItems = existing.items.data
  console.log()
  console.log('existing subscription', JSON.stringify(existing, null, 2))
  console.log()
  logger.debug()
  logger.debug('existing subscription', JSON.stringify(existing, null, 2))
  logger.debug()

  const update: Stripe.SubscriptionUpdateParams = {}

@@ -87,7 +93,7 @@ export async function upsertStripeSubscription({

    for (const metric of pricingPlan.metrics) {
      const { slug: metricSlug } = metric
      console.log({
      logger.debug({
        metricSlug,
        plan: pricingPlan.stripeMetricPlans[metricSlug],
        id: consumer.stripeSubscriptionMetricItems[metricSlug]

@@ -101,7 +107,7 @@ export async function upsertStripeSubscription({

    const invalidItems = items.filter((item) => !item.plan)
    if (plan && invalidItems.length) {
      console.error('billing warning found invalid items', invalidItems)
      logger.error('billing warning found invalid items', invalidItems)
    }

    items = items.filter((item) => item.plan)

@@ -113,7 +119,7 @@ export async function upsertStripeSubscription({
      )

      if (!existingItem) {
        console.error(
        logger.error(
          'billing warning found new item that has a subscription item id but should not',
          { item }
        )

@@ -166,7 +172,7 @@ export async function upsertStripeSubscription({
          24 * 60 * 60 * pricingPlan.trialPeriodDays
      }

      console.log('subscription', action, { items })
      logger.debug('subscription', action, { items })
    } else {
      update.cancel_at_period_end = true
    }

@@ -234,7 +240,7 @@ export async function upsertStripeSubscription({
      createParams.application_fee_percent = project.applicationFeePercent
    }

    console.log('subscription', action, { items })
    logger.debug('subscription', action, { items })
    subscription = await stripe.subscriptions.create(
      createParams,
      ...stripeConnectParams

@@ -244,10 +250,7 @@ export async function upsertStripeSubscription({
  }

  assert(subscription, 500, 'Missing stripe subscription')

  console.log()
  console.log('subscription', JSON.stringify(subscription, null, 2))
  console.log()
  logger.debug('subscription', subscription)

  const consumerUpdate: ConsumerUpdate = consumer

@@ -309,7 +312,7 @@ export async function upsertStripeSubscription({
      pricingPlan?.metrics.find((metric) => metric.slug === metricSlug)

  for (const metricSlug of metricSlugs) {
    console.log({
    logger.debug({
      metricSlug,
      pricingPlan
    })

@@ -340,13 +343,11 @@ export async function upsertStripeSubscription({
    }
  }

  console.log()
  console.log()
  console.log('consumer update', {
  logger.debug()
  logger.debug('consumer update', {
    ...consumer,
    ...consumerUpdate
  })
  console.log()

  const [updatedConsumer] = await db
    .update(schema.consumers)

@@ -8,15 +8,13 @@ export const envSchema = z.object({
  NODE_ENV: z
    .enum(['development', 'test', 'production'])
    .default('development'),

  DATABASE_URL: z.string().url(),

  JWT_SECRET: z.string(),
  PORT: z.number().default(3000),
  SENTRY_DSN: z.string().url(),

  WORKOS_CLIENT_ID: z.string(),
  WORKOS_API_KEY: z.string(),
  WORKOS_SESSION_SECRET: z.string(),

  STRIPE_SECRET_KEY: z.string(),
  STRIPE_PUBLISHABLE_KEY: z.string(),
  STRIPE_WEBHOOK_SECRET: z.string()

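The diff doesn't show how `envSchema` is consumed, but a schema like the one above is typically validated once at startup against `process.env`, failing fast when a required variable is missing. A minimal sketch, assuming the module exports a parsed `env` object (note the sketch uses `z.coerce.number()` for `PORT` so a string value from the environment parses; the project's actual schema may differ):

```ts
// Sketch only: mirrors a subset of the envSchema shown above.
import { z } from 'zod'

const envSchema = z.object({
  NODE_ENV: z
    .enum(['development', 'test', 'production'])
    .default('development'),
  DATABASE_URL: z.string().url(),
  JWT_SECRET: z.string(),
  PORT: z.coerce.number().default(3000)
  // ...remaining keys as in the diff above
})

export type Env = z.infer<typeof envSchema>

// Throws a ZodError listing every missing or invalid variable at boot.
export const env: Env = envSchema.parse(process.env)
```
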
@@ -1,129 +0,0 @@
import type { pino } from 'pino'
import { EventId } from 'eventid'

/** ==========================================================================
 * GCP logging helpers taken from their official repo.
 * @see https://github.com/GoogleCloudPlatform/cloud-solutions/blob/main/projects/pino-logging-gcp-config/src/pino_gcp_config.ts
 * ======================================================================== */

/** Monotonically increasing ID for insertId. */
const eventId = new EventId()

const PINO_TO_GCP_LOG_LEVELS = Object.freeze(
  Object.fromEntries([
    ['trace', 'DEBUG'],
    ['debug', 'DEBUG'],
    ['info', 'INFO'],
    ['warn', 'WARNING'],
    ['error', 'ERROR'],
    ['fatal', 'CRITICAL']
  ])
) as Record<pino.Level, string>

/**
 * Converts pino log level to Google severity level.
 *
 * @see pino.LoggerOptions.formatters.level
 */
export function pinoLevelToGcpSeverity(
  pinoSeverityLabel: string,
  pinoSeverityLevel: number
): Record<string, unknown> {
  const pinoLevel = pinoSeverityLabel as pino.Level
  const severity = PINO_TO_GCP_LOG_LEVELS[pinoLevel] ?? 'INFO'
  return {
    severity,
    level: pinoSeverityLevel
  }
}

/**
 * Creates a JSON fragment string containing the timestamp in GCP logging
 * format.
 *
 * @example ', "timestamp": { "seconds": 123456789, "nanos": 123000000 }'
 *
 * Creating a string with seconds/nanos is ~10x faster than formatting the
 * timestamp as an ISO string.
 *
 * @see https://cloud.google.com/logging/docs/agent/logging/configuration#timestamp-processing
 *
 * As Javascript Date uses millisecond precision, in
 * {@link formatLogObject} the logger adds a monotonically increasing insertId
 * into the log object to preserve log order inside GCP logging.
 *
 * @see https://github.com/googleapis/nodejs-logging/blob/main/src/entry.ts#L189
 */
export function getGcpLoggingTimestamp() {
  const seconds = Date.now() / 1000
  const secondsRounded = Math.floor(seconds)

  // The following line is 2x as fast as seconds % 1000
  // Uses Math.round, not Math.floor due to JS floating point...
  // eg for a Date.now()=1713024754120
  // (seconds-secondsRounded)*1000 => 119.99988555908203
  const millis = Math.round((seconds - secondsRounded) * 1000)

  return `,"timestamp":{"seconds":${secondsRounded},"nanos":${millis}000000}`
}

/**
 * Reformats log entry record for GCP.
 *
 * * Adds OpenTelemetry properties with correct key.
 * * Adds stack_trace if an Error is given in the err property.
 * * Adds serviceContext
 * * Adds sequential insertId to preserve logging order.
 */
export function formatGcpLogObject(
  entry: Record<string, unknown>
): Record<string, unknown> {
  // OpenTelemetry adds properties trace_id, span_id, trace_flags. If these
  // are present, not null and not blank, convert them to the property keys
  // specified by GCP logging.
  //
  // @see https://cloud.google.com/logging/docs/structured-logging#special-payload-fields
  // @see https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/data-model.md#trace-context-fields
  if ((entry.trace_id as string | undefined)?.length) {
    entry['logging.googleapis.com/trace'] = entry.trace_id
    delete entry.trace_id
  }

  if ((entry.span_id as string | undefined)?.length) {
    entry['logging.googleapis.com/spanId'] = entry.span_id
    delete entry.span_id
  }

  // Trace flags is a bit field even though there is only one defined bit,
  // so let's convert it to an int and test against a bitmask.
  // @see https://www.w3.org/TR/trace-context/#trace-flags
  const traceFlagsBits = Number.parseInt(entry.trace_flags as string)
  if (!!traceFlagsBits && (traceFlagsBits & 0x1) === 1) {
    entry['logging.googleapis.com/trace_sampled'] = true
  }
  delete entry.trace_flags

  // If there is an Error, add the stack trace for Event Reporting.
  if (entry.err instanceof Error && entry.err.stack) {
    entry.stack_trace = entry.err.stack
  }

  // Add a sequential EventID.
  //
  // This is required because Javascript Date has a very coarse granularity
  // (millisecond), which makes it quite likely that multiple log entries
  // would have the same timestamp.
  //
  // The GCP Logging API doesn't guarantee to preserve insertion order for
  // entries with the same timestamp. The service does use `insertId` as a
  // secondary ordering for entries with the same timestamp. `insertId` needs
  // to be globally unique (within the project) however.
  //
  // We use a globally unique monotonically increasing EventId as the
  // insertId.
  //
  // @see https://github.com/googleapis/nodejs-logging/blob/main/src/entry.ts#L189
  entry['logging.googleapis.com/insertId'] = eventId.new()

  return entry
}

@@ -1,50 +0,0 @@
/* eslint-disable no-process-env */

import build from 'pino-abstract-transport'

/**
 * Pino transport that sends logs to GCP cloud logging.
 *
 * Google Cloud setup and auth instructions are in the root readme.
 *
 * For information about Pino transports:
 * @see https://getpino.io/#/docs/transports?id=writing-a-transport
 */
export default async function gcpTransport() {
  // Dynamically import @google-cloud/logging only if/when this function is called.
  // This prevents the GCP bloatware from being loaded in prod, where this is not used.
  const { Logging } = await import('@google-cloud/logging')

  const projectId = process.env.GCP_PROJECT_ID || 'agentic-426105'
  const logName = process.env.GCP_LOG_NAME || 'local-dev'

  if (!process.env.METADATA_SERVER_DETECTION) {
    console.error(
      'Metadata server detection is not set. Set `METADATA_SERVER_DETECTION=none` in the repo root `.env`.'
    )
  }

  const logging = new Logging({ projectId })
  const log = logging.log(logName)

  return build(async function (source: AsyncIterable<Record<string, any>>) {
    for await (const obj of source) {
      try {
        const { severity, ...rest } = obj
        const entry = log.entry(
          {
            severity,
            resource: { type: 'global' }
          },
          rest
        )
        await log.write(entry)
      } catch (err) {
        console.error(
          'Error sending log to GCP. Consult `readme.md` for setup instructions.',
          err
        )
      }
    }
  })
}

@@ -1,36 +0,0 @@
/* eslint-disable simple-import-sort/imports */
/* eslint-disable import/first */

import { afterEach, describe, expect, it, vi } from 'vitest'

import { mockSentryNode } from '../test'

// Mock Sentry before importing logger
mockSentryNode()

import * as Sentry from '@sentry/node'
import { logger } from './logger'

describe('logger', () => {
  afterEach(() => {
    // We only clear the usage data so it remains a spy.
    vi.clearAllMocks()
  })

  it('should call Sentry.captureException when calling logger.error() with an Error', () => {
    const error = new Error('test error')
    logger.error(error)
    expect(Sentry.captureException).toHaveBeenCalledWith(error)
  })

  it('should call Sentry.captureException when calling logger.error() with an {err: Error}', () => {
    const error = new Error('test error')
    logger.error({ err: error }, 'With some message')
    expect(Sentry.captureException).toHaveBeenCalledWith(error)
  })

  it('should not call Sentry.captureException for logger.warn()', () => {
    logger.warn('some warning message')
    expect(Sentry.captureException).not.toHaveBeenCalled()
  })
})

@@ -1,116 +1,113 @@
import fs from 'node:fs'

import * as Sentry from '@sentry/node'
import { type Logger, pino } from 'pino'

import { isBrowser, isDev, isProd } from '@/lib/env'
import type { Environment, Service } from '@/lib/types'
import { env } from '@/lib/env'

import {
  formatGcpLogObject,
  getGcpLoggingTimestamp,
  pinoLevelToGcpSeverity
} from './gcp-formatters'
import { getTraceId } from './utils'

const gcpTransportPath = `${import.meta.dirname}/gcp-transport.js`

// TODO: Transport imports are hacky; find a better workaround
const transportExists = fs.existsSync(gcpTransportPath)

export const logger = pino({
  messageKey: 'message',
  level: isProd ? 'info' : 'trace',
  timestamp: () => getGcpLoggingTimestamp(),
  // Add the Sentry trace ID to the log context
  mixin(_obj, _level, mixinLogger) {
    try {
      // Check if the logger already has a traceId in its bindings
      const currentBindings = mixinLogger.bindings()
      if (
        currentBindings &&
        typeof currentBindings === 'object' &&
        'traceId' in currentBindings &&
        currentBindings.traceId
      ) {
        // If traceId already exists in bindings, use that
        const traceId = currentBindings.traceId
        return { traceId, meta: { traceId } }
      }

      // Otherwise, get the trace ID from Sentry
      const traceId = getTraceId()

      // Duplicate in the `meta` field
      return traceId ? { traceId, meta: { traceId } } : {}
    } catch (err) {
      Sentry.captureException(err)
      return {}
    }
  },
  formatters: {
    level: pinoLevelToGcpSeverity,
    log: (entry: Record<string, unknown>) => formatGcpLogObject(entry)
  },
  transport:
    isDev && !isBrowser && transportExists
      ? { target: gcpTransportPath }
      : undefined,
  hooks: {
    logMethod(args, method, level) {
      // Only capture errors if the log level is at least 50 (error)
      if (level >= 50) {
        let foundError: Error | undefined
        const arg0 = args[0] as unknown
        const arg1 = args[1] as unknown

        for (const arg of [arg0, arg1]) {
          if (arg instanceof Error) {
            foundError = arg
          } else if (arg && typeof arg === 'object') {
            if ('err' in arg && arg.err instanceof Error) {
              foundError = arg.err
            }

            if ('error' in arg && arg.error instanceof Error) {
              foundError = arg.error
            }
          }

          if (foundError) {
            break
          }
        }

        if (foundError) {
          Sentry.captureException(foundError)
        }
      }

      return method.apply(this, args)
    }
  }
})

// TODO: Add more groups
export type LogGroup = 'api'

/** Standardized way to extend the logger with helpful info */
export function extendedLogger({
  logger: baseLogger = logger,
  ...args
}: {
  group: LogGroup
  name: string
  /** A more specific subtype of the name */
  nameSubtype?: string
  /** The eventId to add to the logger */
  eventId?: string
  logger?: Logger
}): Logger {
  const { group, name, nameSubtype } = args
  return baseLogger.child(args, {
    msgPrefix: `[${group}:${name}${nameSubtype ? `:${nameSubtype}` : ''}] `
  })
export interface Logger {
  trace(message?: any, ...detail: any[]): void
  debug(message?: any, ...detail: any[]): void
  info(message?: any, ...detail: any[]): void
  warn(message?: any, ...detail: any[]): void
  error(message?: any, ...detail: any[]): void
}

export type { Logger } from 'pino'
export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error'

export class ConsoleLogger implements Logger {
  protected readonly environment: Environment
  protected readonly service: Service
  protected readonly requestId: string
  protected readonly metadata: Record<string, unknown>
  protected readonly console: Console

  constructor({
    requestId,
    service,
    environment = env.NODE_ENV,
    metadata = {},
    console = globalThis.console
  }: {
    requestId: string
    service: Service
    environment?: Environment
    metadata?: Record<string, unknown>
    console?: Console
  }) {
    this.requestId = requestId
    this.service = service
    this.environment = environment
    this.metadata = metadata
    this.console = console
  }

  trace(message?: any, ...detail: any[]) {
    this.console.trace(this._marshal('trace', message, ...detail))
  }

  debug(message?: any, ...detail: any[]) {
    this.console.debug(this._marshal('debug', message, ...detail))
  }

  info(message?: any, ...detail: any[]) {
    this.console.info(this._marshal('info', message, ...detail))
  }

  warn(message?: any, ...detail: any[]) {
    this.console.warn(this._marshal('warn', message, ...detail))
  }

  error(message?: any, ...detail: any[]) {
    this.console.error(this._marshal('error', message, ...detail))
  }

  protected _marshal(level: LogLevel, message?: any, ...detail: any[]): string {
    const log = {
      type: 'log',
      level,
      message,
      detail,
      time: Date.now(),
      env: this.environment,
      service: this.service,
      requestId: this.requestId,
      traceId: getTraceId(),
      metadata: this.metadata
    }

    if (level === 'error') {
      let foundError: Error | undefined
      for (const arg of detail) {
        if (!arg) {
          continue
        }

        if (arg instanceof Error) {
          foundError = arg
          break
        }

        if (typeof arg !== 'object') {
          continue
        }

        if ('err' in arg && arg.err instanceof Error) {
          foundError = arg.err
          break
        }

        if ('error' in arg && arg.error instanceof Error) {
          foundError = arg.error
          break
        }
      }

      if (foundError) {
        Sentry.captureException(foundError)
      }
    }

    return JSON.stringify(log, null, this.environment === 'development' ? 2 : 0)
  }
}

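A minimal usage sketch of the new `ConsoleLogger` added above. The import path is assumed from the other `@/lib/logger` imports in this diff; the `requestId` and metadata values are illustrative (in practice the init middleware constructs the logger per request):

```ts
// Sketch: constructing the request-scoped ConsoleLogger shown above and
// logging an error. Per _marshal(), error-level calls whose detail args
// contain an Error (directly or under `err`/`error`) are also reported
// to Sentry via Sentry.captureException; everything is emitted as JSON.
import { ConsoleLogger } from '@/lib/logger'

const logger = new ConsoleLogger({
  requestId: 'req_123', // illustrative; normally generated by the init middleware
  service: 'api',
  metadata: { route: '/v1/health' } // hypothetical extra context
})

logger.info('request received')
logger.error('upstream call failed', { err: new Error('timeout') })
```
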
@@ -1,26 +1,5 @@
import * as Sentry from '@sentry/node'

/** Get the URL for the logs in the GCP console. */
function getGCPLogsUrl(): string {
  const timestamp = new Date().toISOString()
  const queryParts = [
    'resource.type = "cloud_run_revision"',
    'resource.labels.service_name = "agentic"'
  ]
  const traceId = getTraceId()

  if (traceId) {
    queryParts.push(`jsonPayload.meta.traceId = "${traceId}"`)
  }

  const query = queryParts.join(' AND ')
  const url = `https://console.cloud.google.com/logs/query;query=${encodeURIComponent(
    query
  )};summaryFields=jsonPayload%252Fmeta%252FtraceId:false:32:beginning;aroundTime=${timestamp};duration=PT1H?project=agentic-internal-tools`

  return url
}

/** Get the ID of the trace from the root span of the current span. */
export function getTraceId(): string | undefined {
  try {

@@ -37,18 +16,8 @@ export function getTraceId(): string | undefined {
  }
}

/** Get the Sentry trace link for the current span. */
function getSentryTraceURL(): string {
/** Get the Sentry trace URL for the current span. */
export function getSentryTraceURL(): string {
  const traceId = getTraceId()
  return `https://agentic-platform.sentry.io/performance/trace/${traceId}`
}

/**
 * Get the logs and trace URLs for the current event.
 */
export function getDebugURLs(): { logs: string; trace: string } {
  return {
    logs: getGCPLogsUrl(),
    trace: getSentryTraceURL()
  }
}

@@ -1,7 +1,14 @@
import { createMiddleware } from 'hono/factory'
import { logger as honoLogger } from 'hono/logger'

import { logger } from '@/lib/logger'
import type { DefaultEnv } from '@/lib/types'

import { unless } from './unless'

export const accessLogger = unless(honoLogger(logger.trace), '/v1/health')
export const accessLogger = unless(
  createMiddleware<DefaultEnv>(async (ctx, next) => {
    const logger = ctx.get('logger')
    await honoLogger(logger.trace)(ctx, next)
  }),
  '/v1/health'
)

@@ -23,7 +23,6 @@ export const authenticate = createMiddleware<AuthenticatedEnv>(
    assert(token, 401, 'Unauthorized')

    const payload = await jwt.verify(token, env.JWT_SECRET)
    console.log({ payload })
    assert(payload, 401, 'Unauthorized')
    assert(payload.type === 'user', 401, 'Unauthorized')
    assert(

@@ -3,11 +3,12 @@ import * as Sentry from '@sentry/node'
import { createMiddleware } from 'hono/factory'
import { HTTPException } from 'hono/http-exception'

import type { AuthenticatedEnv } from '@/lib/types'
import type { DefaultEnv } from '@/lib/types'
import { HttpError } from '@/lib/errors'
import { logger } from '@/lib/logger'

export const errorHandler = createMiddleware<AuthenticatedEnv>(
import { env } from '../env'

export const errorHandler = createMiddleware<DefaultEnv>(
  async function errorHandlerMiddleware(ctx, next) {
    try {
      await next()

@@ -15,7 +16,7 @@ export const errorHandler = createMiddleware<AuthenticatedEnv>(
      if (!ctx.res.status) {
        throw new HttpError({ statusCode: 404, message: 'Not Found' })
      }
    } catch (err) {
    } catch (err: any) {
      let message = 'Internal Server Error'
      let status: ContentfulStatusCode = 500

@@ -25,11 +26,16 @@ export const errorHandler = createMiddleware<AuthenticatedEnv>(
      } else if (err instanceof HttpError) {
        message = err.message
        status = err.statusCode
      } else if (env.NODE_ENV === 'development' || env.NODE_ENV === 'test') {
        message = err.message ?? message
      }

      const logger = ctx.get('logger')
      if (status >= 500) {
        logger.error({ err, status, message })
        logger.error(message, { err, status })
        Sentry.captureException(err)
      } else {
        logger.warn(message, { err, status })
      }

      ctx.json({ error: message }, status)

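To illustrate the flow this error handler implements, here is a hypothetical route (not part of this PR) throwing the `HttpError` seen above; the constructor shape is taken from the `statusCode`/`message` usage in the diff, while the route, lookup helper, and plain `Hono` app are illustrative stand-ins:

```ts
// Hypothetical illustration: with errorHandler registered, an HttpError thrown
// in a handler becomes a JSON { error } response with its statusCode; statuses
// >= 500 are additionally logged at error level and reported to Sentry.
import { Hono } from 'hono'

import { HttpError } from '@/lib/errors'

const app = new Hono()

app.get('/v1/example/:id', (ctx) => {
  const record = lookupRecord(ctx.req.param('id'))
  if (!record) {
    throw new HttpError({ statusCode: 404, message: 'Record not found' })
  }
  return ctx.json(record)
})

// Stand-in for a real data lookup.
function lookupRecord(id: string): { id: string } | undefined {
  return id === 'known' ? { id } : undefined
}
```
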
@@ -1,6 +1,7 @@
export * from './access-logger'
export * from './authenticate'
export * from './error-handler'
export * from './init'
export * from './me'
export * from './response-time'
export * from './team'

@@ -0,0 +1,25 @@
import { EventId } from 'eventid'
import { createMiddleware } from 'hono/factory'

import type { DefaultEnv } from '@/lib/types'

import { ConsoleLogger } from '../logger'

/** Monotonically increasing ID for insertId. */
const eventId = new EventId()

export const init = createMiddleware<DefaultEnv>(
  async function initMiddleware(ctx, next) {
    const requestId = eventId.new()
    ctx.set('requestId', requestId)
    ctx.res.headers.set('X-Request-Id', requestId)

    const logger = new ConsoleLogger({
      requestId,
      service: 'api'
    })
    ctx.set('logger', logger)

    await next()
  }
)

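Downstream middleware and routes can then read the request-scoped values that `init` stores on the Hono context. A minimal sketch (the route path is illustrative and the plain `Hono` app stands in for the project's `OpenAPIHono`):

```ts
// Sketch of a handler consuming the per-request logger and requestId that
// the init middleware above sets on the context.
import { Hono } from 'hono'

import type { DefaultEnv } from '@/lib/types'

const app = new Hono<DefaultEnv>()

app.get('/v1/health', (ctx) => {
  const logger = ctx.get('logger')
  const requestId = ctx.get('requestId')

  logger.debug('health check', { requestId })
  return ctx.json({ status: 'ok', requestId })
})
```
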
@@ -1,8 +1,8 @@
import { createMiddleware } from 'hono/factory'

import type { AuthenticatedEnv } from '@/lib/types'
import type { DefaultEnv } from '@/lib/types'

export const responseTime = createMiddleware<AuthenticatedEnv>(
export const responseTime = createMiddleware<DefaultEnv>(
  async function responseTimeMiddleware(ctx, next) {
    const start = Date.now()
    await next()

@@ -7,7 +7,6 @@ import * as Sentry from '@sentry/node'
Sentry.init({
  dsn: process.env.SENTRY_DSN, // eslint-disable-line no-process-env
  environment: process.env.NODE_ENV || 'development', // eslint-disable-line no-process-env
  release: process.env.COMMIT_SHA, // eslint-disable-line no-process-env
  tracesSampleRate: 1.0,
  integrations: [Sentry.extraErrorDataIntegration()]
})

@@ -1,20 +1,13 @@
import type { Logger } from 'pino'
import { vi } from 'vitest'

import type { Logger } from '@/lib/logger'

export function setupMockLogger() {
  return {
    child: () =>
      ({
        trace: vi.fn(),
        debug: vi.fn(),
        info: vi.fn(),
        warn: vi.fn(),
        error: vi.fn()
      }) as unknown as Logger,
    trace: vi.fn(),
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn()
  } as unknown as Logger
  } as Logger
}

@@ -2,16 +2,32 @@ import type { Context } from 'hono'

import type { RawTeamMember, RawUser } from '@/db'

export type AuthenticatedEnvVariables = {
import type { Env } from './env'
import type { Logger } from './logger'

export type Environment = Env['NODE_ENV']
export type Service = 'api'

export type DefaultEnvVariables = {
  requestId: string
  logger: Logger
}

export type AuthenticatedEnvVariables = DefaultEnvVariables & {
  userId: string
  user?: RawUser
  teamMember?: RawTeamMember
}

export type DefaultEnv = {
  Variables: DefaultEnvVariables
}

export type AuthenticatedEnv = {
  Variables: AuthenticatedEnvVariables
}

export type DefaultContext = Context<DefaultEnv>
export type AuthenticatedContext = Context<AuthenticatedEnv>

// TODO: currently unused

@@ -1,7 +1,7 @@
import { createHash, randomUUID } from 'node:crypto'

import type { ContentfulStatusCode } from 'hono/utils/http-status'
import type { ZodSchema } from 'zod'
import type { ZodSchema, ZodTypeDef } from 'zod'

import { HttpError, ZodValidationError } from './errors'

@@ -31,15 +31,19 @@ export function assert(
  }
}

export function parseZodSchema<T>(
  schema: ZodSchema<T>,
export function parseZodSchema<
  Output,
  Def extends ZodTypeDef = ZodTypeDef,
  Input = Output
>(
  schema: ZodSchema<Output, Def, Input>,
  input: unknown,
  {
    error
  }: {
    error?: string
  } = {}
): T {
): Output {
  try {
    return schema.parse(input)
  } catch (err) {

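The widened generics let `parseZodSchema` accept schemas whose input and output types differ, such as transforms or coercions, which the old single-generic `ZodSchema<T>` signature could not express cleanly. A small sketch of the kind of schema this now types correctly (the schema itself is illustrative, not from the repo; the import path is assumed from the `@/lib/utils` imports elsewhere in this diff):

```ts
// Illustrative only: a schema whose Input (string) differs from its Output (Date).
import { z } from 'zod'

import { parseZodSchema } from '@/lib/utils'

const createdAtSchema = z
  .string()
  .datetime()
  .transform((value) => new Date(value))

// Inferred as Date thanks to the separate Output/Input type parameters.
const createdAt = parseZodSchema(createdAtSchema, '2024-05-01T12:00:00Z')
```
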
|
@ -1,4 +1,4 @@
|
|||
import '@/lib/instrument'
|
||||
import '@/lib/sentry'
|
||||
|
||||
import { serve } from '@hono/node-server'
|
||||
import { sentry } from '@hono/sentry'
|
||||
|
@ -16,10 +16,11 @@ export const app = new OpenAPIHono()
|
|||
|
||||
app.use(sentry())
|
||||
app.use(compress())
|
||||
// app.use(middleware.accessLogger)
|
||||
app.use(cors())
|
||||
app.use(middleware.init)
|
||||
app.use(middleware.accessLogger)
|
||||
app.use(middleware.responseTime)
|
||||
app.use(middleware.errorHandler)
|
||||
app.use(cors())
|
||||
|
||||
app.route('/v1', apiV1)
|
||||
|
||||
|
@ -32,6 +33,8 @@ const server = serve({
|
|||
fetch: app.fetch,
|
||||
port: env.PORT
|
||||
})
|
||||
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`Server running on port ${env.PORT}`)
|
||||
|
||||
initExitHooks({ server })
|
||||
|
|
|
@@ -1,7 +1,6 @@
{
  "extends": "@fisch0920/config/tsconfig-node",
  "compilerOptions": {
    "strictNullChecks": true,
    "baseUrl": ".",
    "paths": {
      "@/*": ["src/*"]

@@ -10,7 +10,8 @@ export default [
      drizzle
    },
    rules: {
      ...drizzle.configs.recommended.rules
      ...drizzle.configs.recommended.rules,
      'no-console': 'error'
    }
  }
]

pnpm-lock.yaml: +812 lines (diff too large to display)
@@ -5,10 +5,6 @@

# Agentic <!-- omit from toc -->

## TODO

- simplify logger

## License

UNLICENSED PROPRIETARY © [Agentic](https://x.com/transitive_bs)