From 7f52c1464bc4852837a8bccb1f4ad70117db32c9 Mon Sep 17 00:00:00 2001
From: David Sheldrick
Date: Fri, 17 Jan 2025 11:02:11 +0000
Subject: [PATCH] worker debug logging (#5219)

This PR begins to address some pains we've felt with regard to logging on
Cloudflare workers:

1. A `console.log` invocation is only flushed to the console (or the CF
   dashboard) once a request completes, so if a request hangs for some reason
   you never see the log output.
2. Logs are very eagerly sampled, which makes it hard to rely on them for
   debugging because you never know whether a log statement wasn't reached or
   whether it was just dropped.

So this PR

- adds a Logger durable object that you can connect to via a websocket to get
  an instant stream of every log statement, with no waiting for requests to
  finish.
- wraps the Logger durable object with a Logger class to consolidate code.

The Logger durable object is enabled in dev and preview envs, but is not
available in staging or production yet, because that would require auth and
filtering user DO events by user.

You can try it on this PR by going to
https://pr-5219-preview-deploy.tldraw.com/__debug-tail and then opening the app
in another tab.

This also somewhat tackles
https://linear.app/tldraw/issue/INT-648/investigate-flaky-preview-deploys
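For reference, the tail isn't tied to the little client page — anything that can open a websocket can stream it. A minimal sketch, assuming a Node 22+ runtime (which has a global `WebSocket`) and with `WORKER_ORIGIN` as a placeholder for whichever sync worker origin you're pointing at:

```ts
// Sketch only: stream the debug tail from a terminal.
// WORKER_ORIGIN is a placeholder — substitute the sync worker origin for your env.
const WORKER_ORIGIN = 'wss://<sync-worker-host>'

const socket = new WebSocket(WORKER_ORIGIN + '/app/__debug-tail')
// each message is already newline-terminated by the logger durable object
socket.onmessage = (msg) => process.stdout.write(String(msg.data))
socket.onerror = (err) => console.error('tail socket error', err)
socket.onclose = () => console.error('tail socket closed')
```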
### Change type

- [x] `other`
---
 apps/dotcom/client/src/routes.tsx             |  1 +
 .../src/tla/pages/worker-debug-tail.tsx       | 40 +++++++++++
 apps/dotcom/sync-worker/src/Logger.ts         | 44 ++++++++++++
 .../sync-worker/src/TLLoggerDurableObject.ts  | 65 ++++++++++++++++++
 .../sync-worker/src/TLPostgresReplicator.ts   | 68 ++++++++++++-------
 .../sync-worker/src/TLUserDurableObject.ts    | 37 +++++-----
 apps/dotcom/sync-worker/src/UserDataSyncer.ts | 39 +++++------
 apps/dotcom/sync-worker/src/testRoutes.ts     |  4 +-
 apps/dotcom/sync-worker/src/types.ts          | 15 ++--
 .../sync-worker/src/utils/durableObjects.ts   |  5 ++
 apps/dotcom/sync-worker/src/worker.ts         | 16 ++++-
 apps/dotcom/sync-worker/wrangler.toml         | 20 ++++--
 packages/worker-shared/src/sentry.ts          |  2 +-
 13 files changed, 277 insertions(+), 79 deletions(-)
 create mode 100644 apps/dotcom/client/src/tla/pages/worker-debug-tail.tsx
 create mode 100644 apps/dotcom/sync-worker/src/Logger.ts
 create mode 100644 apps/dotcom/sync-worker/src/TLLoggerDurableObject.ts

diff --git a/apps/dotcom/client/src/routes.tsx b/apps/dotcom/client/src/routes.tsx
index 5e91a51b456f..3a5ac7e2f9cd 100644
--- a/apps/dotcom/client/src/routes.tsx
+++ b/apps/dotcom/client/src/routes.tsx
@@ -155,6 +155,7 @@ export const router = createRoutesFromElements(
 			}}
 		>
 			{isClerkCookieSet || isOverrideFlagSet ? tlaRoutes : legacyRoutes}
+			<Route path="/__debug-tail" lazy={() => import('./tla/pages/worker-debug-tail')} />
 			<Route path="*" lazy={() => import('./pages/not-found')} />
 		</Route>
 	)
diff --git a/apps/dotcom/client/src/tla/pages/worker-debug-tail.tsx b/apps/dotcom/client/src/tla/pages/worker-debug-tail.tsx
new file mode 100644
index 000000000000..e090417c14c0
--- /dev/null
+++ b/apps/dotcom/client/src/tla/pages/worker-debug-tail.tsx
@@ -0,0 +1,40 @@
+import { useEffect, useRef } from 'react'
+import { MULTIPLAYER_SERVER } from '../../utils/config'
+
+export function Component() {
+	const ref = useRef<HTMLDivElement>(null)
+	const isAutoScroll = useRef(true)
+	useEffect(() => {
+		const elem = ref.current
+		if (!elem) return
+		const socket = new WebSocket(MULTIPLAYER_SERVER + '/app/__debug-tail')
+		socket.onmessage = (msg) => {
+			const div = document.createElement('pre')
+			div.textContent = msg.data
+			elem.appendChild(div)
+			if (isAutoScroll.current) {
+				elem.scrollTo({ top: elem.scrollHeight })
+			}
+		}
+		socket.onerror = (err) => {
+			console.error(err)
+		}
+		socket.onclose = () => {
+			setTimeout(() => {
+				window.location.reload()
+			}, 500)
+		}
+
+		const onScroll = () => {
+			isAutoScroll.current = elem.scrollTop + elem.clientHeight > elem.scrollHeight - 100
+		}
+		elem.addEventListener('scroll', onScroll)
+		return () => {
+			socket.close()
+			elem.removeEventListener('scroll', onScroll)
+		}
+	}, [])
+	return (
+		<div ref={ref} style={{ height: '100vh', overflow: 'auto' }} />
+	)
+}
diff --git a/apps/dotcom/sync-worker/src/Logger.ts b/apps/dotcom/sync-worker/src/Logger.ts
new file mode 100644
index 000000000000..df990f154e4e
--- /dev/null
+++ b/apps/dotcom/sync-worker/src/Logger.ts
@@ -0,0 +1,44 @@
+import { createSentry } from '@tldraw/worker-shared'
+import { Environment, isDebugLogging } from './types'
+import { getLogger } from './utils/durableObjects'
+
+export class Logger {
+	readonly logger
+	constructor(
+		env: Environment,
+		private prefix: string,
+		private sentry?: ReturnType<typeof createSentry>
+	) {
+		if (isDebugLogging(env)) {
+			this.logger = getLogger(env)
+		}
+	}
+	private outgoing: string[] = []
+
+	private isRunning = false
+
+	debug(...args: any[]) {
+		if (!this.logger && !this.sentry) return
+		const msg = `[${this.prefix} ${new Date().toISOString()}]: ${args.map((a) => (typeof a === 'object' ? JSON.stringify(a) : a)).join(' ')}`
+		this.outgoing.push(msg)
+		this.processQueue()
+	}
+
+	private async processQueue() {
+		if (this.isRunning) return
+		this.isRunning = true
+		try {
+			while (this.outgoing.length) {
+				const batch = this.outgoing
+				this.outgoing = []
+				await this.logger?.debug(batch)
+				for (const message of batch) {
+					// eslint-disable-next-line @typescript-eslint/no-deprecated
+					this.sentry?.addBreadcrumb({ message })
+				}
+			}
+		} finally {
+			this.isRunning = false
+		}
+	}
+}
diff --git a/apps/dotcom/sync-worker/src/TLLoggerDurableObject.ts b/apps/dotcom/sync-worker/src/TLLoggerDurableObject.ts
new file mode 100644
index 000000000000..01f5f508e0f0
--- /dev/null
+++ b/apps/dotcom/sync-worker/src/TLLoggerDurableObject.ts
@@ -0,0 +1,65 @@
+import { DurableObject } from 'cloudflare:workers'
+import { IRequest } from 'itty-router'
+import { Environment, isDebugLogging } from './types'
+
+export class TLLoggerDurableObject extends DurableObject<Environment> {
+	private readonly isDebugEnv
+	private readonly db
+	constructor(ctx: DurableObjectState, env: Environment) {
+		super(ctx, env)
+		this.isDebugEnv = isDebugLogging(env)
+		this.db = this.ctx.storage.sql
+		this.db.exec(
+			`CREATE TABLE IF NOT EXISTS logs (
+				message TEXT NOT NULL
+			);
+			CREATE TRIGGER IF NOT EXISTS limit_logs
+			AFTER INSERT ON logs
+			BEGIN
+				DELETE FROM logs WHERE rowid NOT IN (
+					SELECT rowid FROM logs ORDER BY rowid DESC LIMIT 20000
+				);
+			END;`
+		)
+	}
+
+	private sockets = new Set<WebSocket>()
+
+	async debug(messages: string[]) {
+		if (!this.isDebugEnv) return
+		for (const message of messages) {
+			this.db.exec(`INSERT INTO logs (message) VALUES (?)`, message)
+		}
+
+		const sockets = Array.from(this.sockets)
+		if (this.sockets.size === 0) return
+		for (const message of messages) {
+			sockets.forEach((socket) => {
+				socket.send(message + '\n')
+			})
+		}
+	}
+
+	getFullHistory() {
+		return this.db
+			.exec('SELECT message FROM logs ORDER BY rowid ASC')
+			.toArray()
+			.map((row) => row.message)
+	}
+
+	override async fetch(_req: IRequest) {
+		if (!this.isDebugEnv) return new Response('Not Found', { status: 404 })
+		const { 0: clientWebSocket, 1: serverWebSocket } = new WebSocketPair()
+		serverWebSocket.accept()
+
+		this.sockets.add(serverWebSocket)
+		const cleanup = () => {
+			this.sockets.delete(serverWebSocket)
+			serverWebSocket.close()
+		}
+		serverWebSocket.addEventListener('close', cleanup)
+		serverWebSocket.addEventListener('error', cleanup)
+		serverWebSocket.send('Connected to logger\n' + this.getFullHistory().join('\n'))
+		return new Response(null, { status: 101, webSocket: clientWebSocket })
+	}
+}
diff --git a/apps/dotcom/sync-worker/src/TLPostgresReplicator.ts
b/apps/dotcom/sync-worker/src/TLPostgresReplicator.ts index d49f96f21dcf..37f21d3c102d 100644 --- a/apps/dotcom/sync-worker/src/TLPostgresReplicator.ts +++ b/apps/dotcom/sync-worker/src/TLPostgresReplicator.ts @@ -1,4 +1,4 @@ -import { ROOM_PREFIX, TlaFile, ZTable } from '@tldraw/dotcom-shared' +import { DB, ROOM_PREFIX, TlaFile, ZTable } from '@tldraw/dotcom-shared' import { ExecutionQueue, assert, @@ -9,10 +9,12 @@ import { } from '@tldraw/utils' import { createSentry } from '@tldraw/worker-shared' import { DurableObject } from 'cloudflare:workers' +import { Kysely, sql } from 'kysely' import postgres from 'postgres' +import { Logger } from './Logger' import type { TLDrawDurableObject } from './TLDrawDurableObject' import { ZReplicationEventWithoutSequenceInfo } from './UserDataSyncer' -import { createPostgresConnection } from './postgres' +import { createPostgresConnection, createPostgresConnectionPool } from './postgres' import { Analytics, Environment, TLPostgresReplicatorEvent } from './types' import { EventData, writeDataPoint } from './utils/analytics' import { getUserDurableObject } from './utils/durableObjects' @@ -82,6 +84,7 @@ export class TLPostgresReplicator extends DurableObject { } private measure: Analytics | undefined private postgresUpdates = 0 + private lastPostgresMessageTime = Date.now() private lastRpmLogTime = Date.now() // we need to guarantee in-order delivery of messages to users @@ -103,6 +106,9 @@ export class TLPostgresReplicator extends DurableObject { } } + private log + + private readonly db: Kysely constructor(ctx: DurableObjectState, env: Environment) { super(ctx, env) this.sentry = createSentry(ctx, env) @@ -115,8 +121,15 @@ export class TLPostgresReplicator extends DurableObject { }) ) - this.reboot(false) + // debug logging in preview envs by default + this.log = new Logger(env, 'TLPostgresReplicator', this.sentry) + this.db = createPostgresConnectionPool(env, 'TLPostgresReplicator') + this.alarm() + this.reboot(false).catch((e) => { + this.captureException(e) + this.__test__panic() + }) this.measure = env.MEASURE } @@ -161,19 +174,20 @@ export class TLPostgresReplicator extends DurableObject { this.ctx.abort() } - private debug(...args: any[]) { - // uncomment for dev time debugging - // console.log('[TLPostgresReplicator]:', ...args) - if (this.sentry) { - // eslint-disable-next-line @typescript-eslint/no-deprecated - this.sentry.addBreadcrumb({ - message: `[TLPostgresReplicator]: ${args.join(' ')}`, - }) - } - } override async alarm() { this.ctx.storage.setAlarm(Date.now() + 1000) this.maybeLogRpm() + // If we haven't heard anything from postgres for 5 seconds, do a little transaction + // to update a random string as a kind of 'ping' to keep the connection alive + // If we haven't heard anything for 10 seconds, reboot + if (Date.now() - this.lastPostgresMessageTime > 10000) { + this.log.debug('rebooting due to inactivity') + this.reboot() + } else if (Date.now() - this.lastPostgresMessageTime > 5000) { + sql`insert into replicator_boot_id ("replicatorId", "bootId") values (${this.ctx.id.toString()}, ${uniqueId()}) on conflict ("replicatorId") do update set "bootId" = excluded."bootId"`.execute( + this.db + ) + } } private maybeLogRpm() { @@ -192,13 +206,13 @@ export class TLPostgresReplicator extends DurableObject { private async reboot(delay = true) { this.logEvent({ type: 'reboot' }) - this.debug('reboot push') + this.log.debug('reboot push') await this.queue.push(async () => { if (delay) { await sleep(1000) } const start = Date.now() - 
this.debug('rebooting') + this.log.debug('rebooting') const res = await Promise.race([ this.boot().then(() => 'ok'), sleep(3000).then(() => 'timeout'), @@ -207,7 +221,7 @@ export class TLPostgresReplicator extends DurableObject { this.captureException(e) return 'error' }) - this.debug('rebooted', res) + this.log.debug('rebooted', res) if (res === 'ok') { this.logEvent({ type: 'reboot_duration', duration: Date.now() - start }) } else { @@ -217,7 +231,8 @@ export class TLPostgresReplicator extends DurableObject { } private async boot() { - this.debug('booting') + this.log.debug('booting') + this.lastPostgresMessageTime = Date.now() // clean up old resources if necessary if (this.state.type === 'connected') { this.state.subscription.unsubscribe() @@ -275,8 +290,13 @@ export class TLPostgresReplicator extends DurableObject { } private handleEvent(row: postgres.Row | null, event: postgres.ReplicationEvent) { + this.lastPostgresMessageTime = Date.now() + if (event.relation.table === 'replicator_boot_id') { + // ping, ignore + return + } this.postgresUpdates++ - this.debug('handleEvent', event) + this.log.debug('handleEvent', event) assert(this.state.type === 'connected', 'state should be connected in handleEvent') try { switch (event.relation.table) { @@ -380,7 +400,7 @@ export class TLPostgresReplicator extends DurableObject { .toArray()[0] if (!sub) { // the file was deleted before the file state - this.debug('file state deleted before file', row) + this.log.debug('file state deleted before file', row) } else { this.sql.exec( `DELETE FROM user_file_subscriptions WHERE userId = ? AND fileId = ?`, @@ -441,7 +461,7 @@ export class TLPostgresReplicator extends DurableObject { private handleUserEvent(row: postgres.Row | null, event: postgres.ReplicationEvent) { assert(row?.id, 'user id is required') - this.debug('USER EVENT', event.command, row.id) + this.log.debug('USER EVENT', event.command, row.id) this.messageUser(row.id, { type: 'row_update', row: row as any, @@ -456,7 +476,7 @@ export class TLPostgresReplicator extends DurableObject { } async ping() { - this.debug('ping') + this.log.debug('ping') return { sequenceId: this.state.sequenceId } } @@ -505,7 +525,7 @@ export class TLPostgresReplicator extends DurableObject { sequenceId: this.state.sequenceId + ':' + sequenceIdSuffix, }) if (res === 'unregister') { - this.debug('unregistering user', userId, event) + this.log.debug('unregistering user', userId, event) this.unregisterUser(userId) } }) @@ -520,9 +540,11 @@ export class TLPostgresReplicator extends DurableObject { } async registerUser(userId: string) { - this.debug('registering user', userId) + this.log.debug('registering user', userId) this.logEvent({ type: 'register_user' }) + this.log.debug('reg user wait') await this.waitUntilConnected() + this.log.debug('reg user connect') assert(this.state.type === 'connected', 'state should be connected in registerUser') const guestFiles = await this.state .db`SELECT "fileId" as id FROM file_state where "userId" = ${userId}` diff --git a/apps/dotcom/sync-worker/src/TLUserDurableObject.ts b/apps/dotcom/sync-worker/src/TLUserDurableObject.ts index 2d094f987bec..3fa6c88b8494 100644 --- a/apps/dotcom/sync-worker/src/TLUserDurableObject.ts +++ b/apps/dotcom/sync-worker/src/TLUserDurableObject.ts @@ -17,6 +17,7 @@ import { createSentry } from '@tldraw/worker-shared' import { DurableObject } from 'cloudflare:workers' import { IRequest, Router } from 'itty-router' import { Kysely, sql } from 'kysely' +import { Logger } from './Logger' import { 
createPostgresConnectionPool } from './postgres' import { getR2KeyForRoom } from './r2' import { type TLPostgresReplicator } from './TLPostgresReplicator' @@ -46,6 +47,8 @@ export class TLUserDurableObject extends DurableObject { } } + private log + cache: UserDataSyncer | null = null constructor(ctx: DurableObjectState, env: Environment) { @@ -55,8 +58,10 @@ export class TLUserDurableObject extends DurableObject { this.replicator = getReplicator(env) this.db = createPostgresConnectionPool(env, 'TLUserDurableObject') - this.debug('created') this.measure = env.MEASURE + + // debug logging in preview envs by default + this.log = new Logger(env, 'TLUserDurableObject', this.sentry) } private userId: string | null = null @@ -68,10 +73,11 @@ export class TLUserDurableObject extends DurableObject { } const rateLimited = await isRateLimited(this.env, this.userId!) if (rateLimited) { + this.log.debug('rate limited') this.logEvent({ type: 'rate_limited', id: this.userId }) throw new Error('Rate limited') } - if (this.cache === null) { + if (!this.cache) { await this.init() } else { await this.cache.waitUntilConnected() @@ -81,17 +87,19 @@ export class TLUserDurableObject extends DurableObject { private async init() { assert(this.userId, 'User ID not set') - this.debug('init') + this.log.debug('init', this.userId) this.cache = new UserDataSyncer( this.ctx, this.env, this.db, this.userId, (message) => this.broadcast(message), - this.logEvent.bind(this) + this.logEvent.bind(this), + this.log ) - this.debug('cache', !!this.cache) + this.log.debug('cache', !!this.cache) await this.cache.waitUntilConnected() + this.log.debug('cache connected') } // Handle a request to the Durable Object. @@ -183,17 +191,6 @@ export class TLUserDurableObject extends DurableObject { return new Response(null, { status: 101, webSocket: clientWebSocket }) } - private debug(...args: any[]) { - // uncomment for dev time debugging - // console.log('[TLUserDurableObject]: ', ...args) - if (this.sentry) { - // eslint-disable-next-line @typescript-eslint/no-deprecated - this.sentry.addBreadcrumb({ - message: `[TLUserDurableObject]: ${args.map((a) => (typeof a === 'object' ? JSON.stringify(a) : a)).join(' ')}`, - }) - } - } - private async handleSocketMessage(message: string) { const rateLimited = await isRateLimited(this.env, this.userId!) this.assertCache() @@ -393,7 +390,7 @@ export class TLUserDurableObject extends DurableObject { } ) // TODO: We should probably handle a case where the above operation succeeds but the one below fails - this.debug('mutation success', this.userId) + this.log.debug('mutation success', this.userId) await this.db .transaction() .execute(async (tx) => { @@ -410,14 +407,14 @@ export class TLUserDurableObject extends DurableObject { ) .returning('mutationNumber') .executeTakeFirstOrThrow() - this.debug('mutation number success', this.userId) + this.log.debug('mutation number success', this.userId) const mutationNumber = Number(result.mutationNumber) const currentMutationNumber = this.cache.mutations.at(-1)?.mutationNumber ?? 
0 assert( mutationNumber > currentMutationNumber, `mutation number did not increment mutationNumber: ${mutationNumber} current: ${currentMutationNumber}` ) - this.debug('pushing mutation to cache', this.userId, mutationNumber) + this.log.debug('pushing mutation to cache', this.userId, mutationNumber) this.cache.mutations.push({ mutationNumber, mutationId: msg.mutationId }) }) .catch((e) => { @@ -438,7 +435,7 @@ export class TLUserDurableObject extends DurableObject { async handleReplicationEvent(event: ZReplicationEvent) { this.logEvent({ type: 'replication_event', id: this.userId ?? 'anon' }) - this.debug('replication event', event, !!this.cache) + this.log.debug('replication event', event, !!this.cache) if (!this.cache) { return 'unregister' } diff --git a/apps/dotcom/sync-worker/src/UserDataSyncer.ts b/apps/dotcom/sync-worker/src/UserDataSyncer.ts index 97b36250dbc3..4d644bd884aa 100644 --- a/apps/dotcom/sync-worker/src/UserDataSyncer.ts +++ b/apps/dotcom/sync-worker/src/UserDataSyncer.ts @@ -10,6 +10,7 @@ import { import { ExecutionQueue, assert, promiseWithResolve, sleep, uniqueId } from '@tldraw/utils' import { createSentry } from '@tldraw/worker-shared' import { Kysely } from 'kysely' +import { Logger } from './Logger' import { TLPostgresReplicator } from './TLPostgresReplicator' import { fileKeys, @@ -122,28 +123,18 @@ export class UserDataSyncer { private db: Kysely, private userId: string, private broadcast: (message: ZServerSentMessage) => void, - private logEvent: (event: TLUserDurableObjectEvent) => void + private logEvent: (event: TLUserDurableObjectEvent) => void, + private log: Logger ) { this.sentry = createSentry(ctx, env) this.replicator = getReplicator(env) this.reboot(false) } - private debug(...args: any[]) { - // uncomment for dev time debugging - // console.log('[UserDataSyncer]:', ...args) - if (this.sentry) { - // eslint-disable-next-line @typescript-eslint/no-deprecated - this.sentry.addBreadcrumb({ - message: `[UserDataSyncer]: ${args.join(' ')}`, - }) - } - } - private queue = new ExecutionQueue() async reboot(delay = true) { - this.debug('rebooting') + this.log.debug('rebooting') this.logEvent({ type: 'reboot', id: this.userId }) await this.queue.push(async () => { if (delay) { @@ -160,7 +151,7 @@ export class UserDataSyncer { } private commitMutations(upToAndIncludingNumber: number) { - this.debug('commit mutations', this.userId, upToAndIncludingNumber, this.mutations) + this.log.debug('commit mutations', this.userId, upToAndIncludingNumber, this.mutations) const mutationIds = this.mutations .filter((m) => m.mutationNumber <= upToAndIncludingNumber) .map((m) => m.mutationId) @@ -199,11 +190,11 @@ export class UserDataSyncer { } private async boot() { - this.debug('booting') + this.log.debug('booting') // todo: clean up old resources if necessary? 
const start = Date.now() const { sequenceId, sequenceNumber } = await this.replicator.registerUser(this.userId) - this.debug('registered user, sequenceId:', sequenceId) + this.log.debug('registered user, sequenceId:', sequenceId) this.state = { type: 'connecting', // preserve the promise so any awaiters do eventually get resolved @@ -278,14 +269,14 @@ export class UserDataSyncer { this.state.data = initialData // do an unnecessary assign here to tell typescript that the state might have changed - this.debug('got initial data') + this.log.debug('got initial data') this.state = this.updateStateAfterBootStep() // this will prevent more events from being added to the buffer await promise const end = Date.now() this.logEvent({ type: 'reboot_duration', id: this.userId, duration: end - start }) - this.debug('boot time', end - start, 'ms') + this.log.debug('boot time', end - start, 'ms') assert(this.state.type === 'connected', 'state should be connected after boot') } @@ -322,13 +313,17 @@ export class UserDataSyncer { ('sequenceId' in this.state && this.state.sequenceId !== event.sequenceId) ) { // the replicator has restarted, so we need to reboot - this.debug('force reboot', this.state, event) + this.log.debug('force reboot', this.state, event) this.reboot() return } assert(this.state.type !== 'init', 'state should not be init: ' + event.type) if (event.sequenceNumber !== this.state.lastSequenceNumber + 1) { - this.debug('sequence number mismatch', event.sequenceNumber, this.state.lastSequenceNumber) + this.log.debug( + 'sequence number mismatch', + event.sequenceNumber, + this.state.lastSequenceNumber + ) this.reboot() return } @@ -346,14 +341,14 @@ export class UserDataSyncer { } assert(this.state.type === 'connecting') - this.debug('got event', event, this.state.didGetBootId, this.state.bootId) + this.log.debug('got event', event, this.state.didGetBootId, this.state.bootId) if (this.state.didGetBootId) { // we've already got the boot id, so just shove things into // a buffer until the state is set to 'connecting' this.state.bufferedEvents.push(event) } else if (event.type === 'boot_complete' && event.bootId === this.state.bootId) { this.state.didGetBootId = true - this.debug('got boot id') + this.log.debug('got boot id') this.updateStateAfterBootStep() } // ignore other events until we get the boot id diff --git a/apps/dotcom/sync-worker/src/testRoutes.ts b/apps/dotcom/sync-worker/src/testRoutes.ts index 290a222c10f0..263b3876e9d7 100644 --- a/apps/dotcom/sync-worker/src/testRoutes.ts +++ b/apps/dotcom/sync-worker/src/testRoutes.ts @@ -1,10 +1,10 @@ import { createRouter, notFound } from '@tldraw/worker-shared' -import type { Environment } from './types' +import { isDebugLogging, type Environment } from './types' import { getReplicator, getUserDurableObject } from './utils/durableObjects' export const testRoutes = createRouter() .all('/app/__test__/*', (_, env) => { - if (env.IS_LOCAL !== 'true') return notFound() + if (!isDebugLogging(env)) return notFound() return undefined }) .get('/app/__test__/replicator/reboot', (_, env) => { diff --git a/apps/dotcom/sync-worker/src/types.ts b/apps/dotcom/sync-worker/src/types.ts index 9acaf68b9941..b8a7736a3043 100644 --- a/apps/dotcom/sync-worker/src/types.ts +++ b/apps/dotcom/sync-worker/src/types.ts @@ -1,10 +1,11 @@ // https://developers.cloudflare.com/analytics/analytics-engine/ -import { RoomSnapshot } from '@tldraw/sync-core' +import type { RoomSnapshot } from '@tldraw/sync-core' // import { TLAppDurableObject } from './TLAppDurableObject' 
-import { TLDrawDurableObject } from './TLDrawDurableObject' -import { TLPostgresReplicator } from './TLPostgresReplicator' -import { TLUserDurableObject } from './TLUserDurableObject' +import type { TLDrawDurableObject } from './TLDrawDurableObject' +import type { TLLoggerDurableObject } from './TLLoggerDurableObject' +import type { TLPostgresReplicator } from './TLPostgresReplicator' +import type { TLUserDurableObject } from './TLUserDurableObject' // This type isn't available in @cloudflare/workers-types yet export interface Analytics { @@ -21,6 +22,8 @@ export interface Environment { // TLAPP_DO: DurableObjectNamespace TL_PG_REPLICATOR: DurableObjectNamespace TL_USER: DurableObjectNamespace + TL_LOGGER: DurableObjectNamespace + BOTCOM_POSTGRES_CONNECTION_STRING: string BOTCOM_POSTGRES_POOLED_CONNECTION_STRING: string MEASURE: Analytics | undefined @@ -54,6 +57,10 @@ export interface Environment { RATE_LIMITER: RateLimit } +export function isDebugLogging(env: Environment) { + return env.TLDRAW_ENV === 'development' || env.TLDRAW_ENV === 'preview' +} + export type DBLoadResult = | { type: 'error' diff --git a/apps/dotcom/sync-worker/src/utils/durableObjects.ts b/apps/dotcom/sync-worker/src/utils/durableObjects.ts index 6ef1961bce72..1171dce71f0a 100644 --- a/apps/dotcom/sync-worker/src/utils/durableObjects.ts +++ b/apps/dotcom/sync-worker/src/utils/durableObjects.ts @@ -1,3 +1,4 @@ +import { TLLoggerDurableObject } from '../TLLoggerDurableObject' import type { TLPostgresReplicator } from '../TLPostgresReplicator' import type { TLUserDurableObject } from '../TLUserDurableObject' import { Environment } from '../types' @@ -11,3 +12,7 @@ export function getReplicator(env: Environment) { export function getUserDurableObject(env: Environment, userId: string) { return env.TL_USER.get(env.TL_USER.idFromName(userId)) as any as TLUserDurableObject } + +export function getLogger(env: Environment) { + return env.TL_LOGGER.get(env.TL_LOGGER.idFromName('logger')) as any as TLLoggerDurableObject +} diff --git a/apps/dotcom/sync-worker/src/worker.ts b/apps/dotcom/sync-worker/src/worker.ts index 87f7618d3814..77593da318a1 100644 --- a/apps/dotcom/sync-worker/src/worker.ts +++ b/apps/dotcom/sync-worker/src/worker.ts @@ -24,13 +24,15 @@ import { deleteFile } from './routes/tla/deleteFile' import { forwardRoomRequest } from './routes/tla/forwardRoomRequest' import { getPublishedFile } from './routes/tla/getPublishedFile' import { testRoutes } from './testRoutes' -import { Environment } from './types' -import { getUserDurableObject } from './utils/durableObjects' +import { Environment, isDebugLogging } from './types' +import { getLogger, getUserDurableObject } from './utils/durableObjects' import { getAuth } from './utils/tla/getAuth' // export { TLAppDurableObject } from './TLAppDurableObject' export { TLDrawDurableObject } from './TLDrawDurableObject' +export { TLLoggerDurableObject } from './TLLoggerDurableObject' export { TLPostgresReplicator } from './TLPostgresReplicator' export { TLUserDurableObject } from './TLUserDurableObject' + export class TLAppDurableObject extends DurableObject {} const { preflight, corsify } = cors({ @@ -83,6 +85,16 @@ const router = createRouter() proxied.headers.delete('cookie') return fetch(proxied) }) + .get('/app/__debug-tail', (req, env) => { + if (isDebugLogging(env)) { + // upgrade to websocket + if (req.headers.get('upgrade') === 'websocket') { + return getLogger(env).fetch(req) + } + } + + return new Response('Not Found', { status: 404 }) + }) // end app .all('*', 
notFound) diff --git a/apps/dotcom/sync-worker/wrangler.toml b/apps/dotcom/sync-worker/wrangler.toml index 482f7074fcb6..bdf51e678dd9 100644 --- a/apps/dotcom/sync-worker/wrangler.toml +++ b/apps/dotcom/sync-worker/wrangler.toml @@ -30,6 +30,10 @@ tag = "v5" new_sqlite_classes = ["TLPostgresReplicator"] new_classes = ["TLUserDurableObject"] +[[migrations]] +tag = "v6" +new_sqlite_classes = ["TLLoggerDurableObject"] + [[analytics_engine_datasets]] binding = "MEASURE" @@ -41,6 +45,7 @@ name = "dev-tldraw-multiplayer" [env.dev.vars] BOTCOM_POSTGRES_CONNECTION_STRING = "postgresql://user:password@127.0.0.1:6543/postgres" BOTCOM_POSTGRES_POOLED_CONNECTION_STRING = "postgresql://user:password@127.0.0.1:6543/postgres" +TLDRAW_ENV = "development" # we don't have a hard-coded name for preview. we instead have to generate it at build time and append it to this file. @@ -79,35 +84,40 @@ pattern = "sync.tldraw.xyz" bindings = [ { name = "TLDR_DOC", class_name = "TLDrawDurableObject" }, { name = "TL_PG_REPLICATOR", class_name = "TLPostgresReplicator" }, - { name = "TL_USER", class_name = "TLUserDurableObject" } + { name = "TL_USER", class_name = "TLUserDurableObject" }, + { name = "TL_LOGGER", class_name = "TLLoggerDurableObject" }, ] [durable_objects] bindings = [ { name = "TLDR_DOC", class_name = "TLDrawDurableObject" }, { name = "TL_PG_REPLICATOR", class_name = "TLPostgresReplicator" }, - { name = "TL_USER", class_name = "TLUserDurableObject" } + { name = "TL_USER", class_name = "TLUserDurableObject" }, + { name = "TL_LOGGER", class_name = "TLLoggerDurableObject" }, ] [env.preview.durable_objects] bindings = [ { name = "TLDR_DOC", class_name = "TLDrawDurableObject" }, { name = "TL_PG_REPLICATOR", class_name = "TLPostgresReplicator" }, - { name = "TL_USER", class_name = "TLUserDurableObject" } + { name = "TL_USER", class_name = "TLUserDurableObject" }, + { name = "TL_LOGGER", class_name = "TLLoggerDurableObject" }, ] [env.staging.durable_objects] bindings = [ { name = "TLDR_DOC", class_name = "TLDrawDurableObject" }, { name = "TL_PG_REPLICATOR", class_name = "TLPostgresReplicator" }, - { name = "TL_USER", class_name = "TLUserDurableObject" } + { name = "TL_USER", class_name = "TLUserDurableObject" }, + { name = "TL_LOGGER", class_name = "TLLoggerDurableObject" }, ] [env.production.durable_objects] bindings = [ { name = "TLDR_DOC", class_name = "TLDrawDurableObject" }, { name = "TL_PG_REPLICATOR", class_name = "TLPostgresReplicator" }, - { name = "TL_USER", class_name = "TLUserDurableObject" } + { name = "TL_USER", class_name = "TLUserDurableObject" }, + { name = "TL_LOGGER", class_name = "TLLoggerDurableObject" }, ] #################### Analytics engine #################### diff --git a/packages/worker-shared/src/sentry.ts b/packages/worker-shared/src/sentry.ts index 4c1f70afde87..ec8cb7d2cffa 100644 --- a/packages/worker-shared/src/sentry.ts +++ b/packages/worker-shared/src/sentry.ts @@ -16,7 +16,7 @@ export interface SentryEnvironment { export function createSentry(ctx: Context, env: SentryEnvironment, request?: Request) { // TLDRAW_ENV is undefined in dev - if (!env.SENTRY_DSN && !env.TLDRAW_ENV) { + if (!env.SENTRY_DSN && env.TLDRAW_ENV === 'development') { return null }
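A rough usage sketch for the new `Logger` wrapper, for anyone wiring it into another durable object. `MyDurableObject` and `doWork` are hypothetical names, not part of this PR; the real call sites are in `TLPostgresReplicator` and `TLUserDurableObject` above.

```ts
// Sketch: how a sync-worker durable object is expected to use the Logger wrapper.
// `MyDurableObject` is a hypothetical example.
import { createSentry } from '@tldraw/worker-shared'
import { DurableObject } from 'cloudflare:workers'
import { Logger } from './Logger'
import { Environment } from './types'

export class MyDurableObject extends DurableObject<Environment> {
	private readonly sentry
	private readonly log: Logger

	constructor(ctx: DurableObjectState, env: Environment) {
		super(ctx, env)
		this.sentry = createSentry(ctx, env)
		// in dev/preview (isDebugLogging) messages stream to the TLLoggerDurableObject
		// websocket tail; they are also recorded as sentry breadcrumbs when sentry is set
		this.log = new Logger(env, 'MyDurableObject', this.sentry)
	}

	async doWork(userId: string) {
		this.log.debug('doing work for', userId)
	}
}
```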