From 3c2e8661173cdc2ce1dbf42279735a41115a74e2 Mon Sep 17 00:00:00 2001 From: Iain Sproat <68657+iainsproat@users.noreply.github.com> Date: Thu, 23 Feb 2023 17:56:06 +0000 Subject: [PATCH 1/6] chore(fileimport): log more parameters (#1412) --- packages/fileimport-service/src/daemon.js | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/packages/fileimport-service/src/daemon.js b/packages/fileimport-service/src/daemon.js index bf1fb459a..945f605e6 100644 --- a/packages/fileimport-service/src/daemon.js +++ b/packages/fileimport-service/src/daemon.js @@ -50,7 +50,7 @@ async function startTask() { } async function doTask(task) { - const taskLogger = logger.child({ task }) + let taskLogger = logger.child({ taskId: task.id }) let tempUserToken = null let serverApi = null let fileTypeForMetric = 'unknown' @@ -65,7 +65,15 @@ async function doTask(task) { } fileTypeForMetric = info.fileType || 'missing_info' fileSizeForMetric = Number(info.fileSize) || 0 - + taskLogger = taskLogger.child({ + fileId: info.id, + fileType: fileTypeForMetric, + fileName: info.fileName, + fileSize: fileSizeForMetric, + userId: info.userId, + streamId: info.streamId, + branchName: info.branchName + }) fs.mkdirSync(TMP_INPUT_DIR, { recursive: true }) serverApi = new ServerAPI({ streamId: info.streamId }) @@ -86,6 +94,7 @@ async function doTask(task) { if (info.fileType === 'ifc') { await runProcessWithTimeout( + taskLogger, process.env['NODE_BINARY_PATH'] || 'node', [ '--no-experimental-fetch', @@ -104,6 +113,7 @@ async function doTask(task) { ) } else if (info.fileType === 'stl') { await runProcessWithTimeout( + taskLogger, process.env['PYTHON_BINARY_PATH'] || 'python3', [ './stl/import_file.py', @@ -127,6 +137,7 @@ async function doTask(task) { }) await runProcessWithTimeout( + taskLogger, process.env['PYTHON_BINARY_PATH'] || 'python3', [ '-u', @@ -190,9 +201,9 @@ async function doTask(task) { } } -function runProcessWithTimeout(cmd, cmdArgs, extraEnv, timeoutMs) { +function runProcessWithTimeout(processLogger, cmd, cmdArgs, extraEnv, timeoutMs) { return new Promise((resolve, reject) => { - let boundLogger = logger.child({ cmd, args: cmdArgs }) + let boundLogger = processLogger.child({ cmd, args: cmdArgs }) boundLogger.info('Starting process.') const childProc = spawn(cmd, cmdArgs, { env: { ...process.env, ...extraEnv } }) From 6403a3fdddba66a74324214e715d42f11ed6bac6 Mon Sep 17 00:00:00 2001 From: Iain Sproat <68657+iainsproat@users.noreply.github.com> Date: Thu, 23 Feb 2023 20:29:44 +0000 Subject: [PATCH 2/6] fix(webhook): should not duplicate logging of error (#1413) - the caller should be responsible for logging an error response --- packages/webhook-service/src/webhookCaller.js | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/webhook-service/src/webhookCaller.js b/packages/webhook-service/src/webhookCaller.js index 44cba1df8..88ca2b184 100644 --- a/packages/webhook-service/src/webhookCaller.js +++ b/packages/webhook-service/src/webhookCaller.js @@ -64,7 +64,6 @@ async function makeNetworkRequest({ url, data, headersData, logger }) { responseBody: response.body } } catch (e) { - logger.error(e, 'error when making network request for webhook.') return { success: false, error: e.toString(), From 4b1969c8048c2c3fa9c792220420c64976ef3263 Mon Sep 17 00:00:00 2001 From: Iain Sproat <68657+iainsproat@users.noreply.github.com> Date: Fri, 24 Feb 2023 11:22:21 +0000 Subject: [PATCH 3/6] chore(server): improve logging of REST API endpoints (#1414) - chore(server): adds endpoint 
parameter to some log messages - and any other relevant parameters we have to hand (streamId, userId etc..) - logging severity is info for `400` status codes, error for `500` status codes or equivalent. --- packages/server/app.ts | 2 ++ packages/server/modules/auth/rest/index.js | 18 ++++++++++++------ .../server/modules/auth/strategies/azure-ad.js | 8 +++++--- .../server/modules/auth/strategies/local.js | 10 +++++++--- packages/server/modules/blobstorage/index.js | 10 +++++++--- .../server/modules/core/rest/diffDownload.js | 3 ++- .../server/modules/core/rest/diffUpload.js | 3 ++- packages/server/modules/core/rest/upload.js | 5 +++-- packages/server/modules/emails/rest/index.js | 4 +++- .../server/modules/emails/services/sending.ts | 2 +- packages/server/modules/fileuploads/index.js | 8 +++++++- packages/server/modules/pwdreset/rest/index.ts | 5 +++++ 12 files changed, 56 insertions(+), 22 deletions(-) diff --git a/packages/server/app.ts b/packages/server/app.ts index 4ee2666c0..430e49954 100644 --- a/packages/server/app.ts +++ b/packages/server/app.ts @@ -217,10 +217,12 @@ export async function init() { // Expose prometheus metrics app.get('/metrics', async (req, res) => { + const boundLogger = startupLogger.child({ endpoint: '/metrics' }) try { res.set('Content-Type', prometheusClient.register.contentType) res.end(await prometheusClient.register.metrics()) } catch (ex: unknown) { + boundLogger.error(ex, 'Error while retrieving metrics.') res.status(500).end(ex instanceof Error ? ex.message : `${ex}`) } }) diff --git a/packages/server/modules/auth/rest/index.js b/packages/server/modules/auth/rest/index.js index 64bf63d52..0cbc14528 100644 --- a/packages/server/modules/auth/rest/index.js +++ b/packages/server/modules/auth/rest/index.js @@ -14,7 +14,7 @@ const { revokeRefreshToken } = require(`@/modules/auth/services/apps`) const { validateScopes } = require(`@/modules/shared`) const { InvalidAccessCodeRequestError } = require('@/modules/auth/errors') const { ForbiddenError } = require('apollo-server-errors') -const { moduleLogger } = require('@/logging/logging') +const { authLogger } = require('@/logging/logging') // TODO: Secure these endpoints! module.exports = (app) => { @@ -23,6 +23,7 @@ module.exports = (app) => { TODO: ensure same origin. */ app.get('/auth/accesscode', async (req, res) => { + const boundLogger = authLogger.child({ endpoint: '/auth/accesscode' }) try { const appId = req.query.appId const app = await getApp({ id: appId }) @@ -44,15 +45,18 @@ module.exports = (app) => { const ac = await createAuthorizationCode({ appId, userId, challenge }) return res.redirect(`${app.redirectUrl}?access_code=${ac}`) } catch (err) { - sentry({ err }) - moduleLogger.error(err) - if ( err instanceof InvalidAccessCodeRequestError || err instanceof ForbiddenError ) { + boundLogger.info( + { err }, + 'Invalid access code request error, or Forbidden error.' 
+ ) return res.status(400).send(err.message) } else { + sentry({ err }) + boundLogger.error(err) return res .status(500) .send('Something went wrong while processing your request') @@ -65,6 +69,7 @@ module.exports = (app) => { */ app.options('/auth/token', cors()) app.post('/auth/token', cors(), async (req, res) => { + const boundLogger = authLogger.child({ endpoint: '/auth/token' }) try { // Token refresh if (req.body.refreshToken) { @@ -99,7 +104,7 @@ module.exports = (app) => { return res.send(authResponse) } catch (err) { sentry({ err }) - moduleLogger.warn(err) + boundLogger.warn(err) return res.status(401).send({ err: err.message }) } }) @@ -108,6 +113,7 @@ module.exports = (app) => { Ensures a user is logged out by invalidating their token and refresh token. */ app.post('/auth/logout', async (req, res) => { + const boundLogger = authLogger.child({ endpoint: '/auth/logout' }) try { const token = req.body.token const refreshToken = req.body.refreshToken @@ -120,7 +126,7 @@ module.exports = (app) => { return res.status(200).send({ message: 'You have logged out.' }) } catch (err) { sentry({ err }) - moduleLogger.error(err) + boundLogger.error(err) return res.status(400).send('Something went wrong while trying to logout.') } }) diff --git a/packages/server/modules/auth/strategies/azure-ad.js b/packages/server/modules/auth/strategies/azure-ad.js index e800a694e..9eeeffadf 100644 --- a/packages/server/modules/auth/strategies/azure-ad.js +++ b/packages/server/modules/auth/strategies/azure-ad.js @@ -12,7 +12,7 @@ const { resolveAuthRedirectPath } = require('@/modules/serverinvites/services/inviteProcessingService') const { passportAuthenticate } = require('@/modules/auth/services/passportService') -const { logger } = require('@/logging/logging') +const { authLogger } = require('@/logging/logging') const { UserInputError } = require('@/modules/core/errors/userinput') module.exports = async (app, session, sessionStorage, finalizeAuth) => { @@ -51,6 +51,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { session, passportAuthenticate('azuread-openidconnect'), async (req, res, next) => { + let boundLogger = authLogger.child({ endpoint: '/auth/azure/callback' }) const serverInfo = await getServerInfo() try { @@ -111,6 +112,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { // ID is used later for verifying access token req.user.id = myUser.id + boundLogger = boundLogger.child({ userId: myUser.id }) // use the invite await finalizeInvitedServerRegistration(user.email, myUser.id) @@ -123,10 +125,10 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { } catch (err) { switch (err.constructor) { case UserInputError: - logger.info(err) + boundLogger.info(err) break default: - logger.error(err) + boundLogger.error(err) } return next() } diff --git a/packages/server/modules/auth/strategies/local.js b/packages/server/modules/auth/strategies/local.js index b2a6cb747..93849aee3 100644 --- a/packages/server/modules/auth/strategies/local.js +++ b/packages/server/modules/auth/strategies/local.js @@ -17,7 +17,7 @@ const { resolveAuthRedirectPath } = require('@/modules/serverinvites/services/inviteProcessingService') const { getIpFromRequest } = require('@/modules/shared/utils/ip') -const { logger } = require('@/logging/logging') +const { authLogger } = require('@/logging/logging') const { NoInviteFoundError } = require('@/modules/serverinvites/errors') const { UserInputError, @@ -38,6 +38,7 @@ module.exports = async (app, session, 
sessionAppId, finalizeAuth) => { session, sessionAppId, async (req, res, next) => { + const boundLogger = authLogger.child({ endpoint: '/auth/local/login' }) try { const valid = await validatePasssword({ email: req.body.email, @@ -52,6 +53,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { return next() } catch (err) { + boundLogger.info({ err }, 'Error while logging in.') return res.status(401).send({ err: true, message: 'Invalid credentials' }) } }, @@ -63,6 +65,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { session, sessionAppId, async (req, res, next) => { + let boundLogger = authLogger.child({ endpoint: '/auth/local/register' }) const serverInfo = await getServerInfo() try { if (!req.body.password) throw new UserInputError('Password missing') @@ -99,6 +102,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { // so we go ahead and register the user const userId = await createUser(user) req.user = { id: userId, email: user.email } + boundLogger = boundLogger.child({ userId }) // 4. use up all server-only invites the email had attached to it await finalizeInvitedServerRegistration(user.email, userId) @@ -112,10 +116,10 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { case PasswordTooShortError: case UserInputError: case NoInviteFoundError: - logger.info(err) + boundLogger.info({ err }, 'Error while registering user.') return res.status(400).send({ err: err.message }) default: - logger.error(err) + boundLogger.error(err, 'Error while registering user.') return res.status(500).send({ err: err.message }) } } diff --git a/packages/server/modules/blobstorage/index.js b/packages/server/modules/blobstorage/index.js index 4caf017de..da82dc910 100644 --- a/packages/server/modules/blobstorage/index.js +++ b/packages/server/modules/blobstorage/index.js @@ -80,6 +80,7 @@ exports.init = async (app) => { allowForAllRegisteredUsersOnPublicStreamsWithPublicComments ]), async (req, res) => { + let boundLogger = logger.child({ endpoint: '/api/stream/:streamId/blob' }) // no checking of startup conditions, just dont init the endpoints if not configured right //authorize request const uploadOperations = {} @@ -89,10 +90,12 @@ exports.init = async (app) => { limits: { fileSize: getFileSizeLimit() } }) const streamId = req.params.streamId + boundLogger = boundLogger.child({ streamId, userId: req.context.userId }) busboy.on('file', (formKey, file, info) => { const { filename: fileName } = info const fileType = fileName.split('.').pop().toLowerCase() + boundLogger = boundLogger.child({ fileName, fileType }) const registerUploadResult = (processingPromise) => { finalizePromises.push( processingPromise.then((resultItem) => ({ ...resultItem, formKey })) @@ -109,6 +112,8 @@ exports.init = async (app) => { } } + boundLogger = boundLogger.child({ blobId }) + uploadOperations[blobId] = uploadFileStream( storeFileStream, { streamId, userId: req.context.userId }, @@ -148,7 +153,7 @@ exports.init = async (app) => { }) busboy.on('error', async (err) => { - logger.error(err, 'File upload error') + boundLogger.info({ err }, 'Upload request error.') //delete all started uploads await Promise.all( Object.keys(uploadOperations).map((blobId) => @@ -157,8 +162,7 @@ exports.init = async (app) => { ) const status = 400 - const response = 'Upload request error. The server logs have more details' - + const response = 'Upload request error. 
The server logs may have more details' res.status(status).end(response) }) diff --git a/packages/server/modules/core/rest/diffDownload.js b/packages/server/modules/core/rest/diffDownload.js index dd30b9b71..dde36807d 100644 --- a/packages/server/modules/core/rest/diffDownload.js +++ b/packages/server/modules/core/rest/diffDownload.js @@ -15,7 +15,8 @@ module.exports = (app) => { app.post('/api/getobjects/:streamId', cors(), async (req, res) => { const boundLogger = logger.child({ userId: req.context.userId || '-', - streamId: req.params.streamId + streamId: req.params.streamId, + endpoint: '/api/getobjects/:streamId' }) const hasStreamAccess = await validatePermissionsReadStream( req.params.streamId, diff --git a/packages/server/modules/core/rest/diffUpload.js b/packages/server/modules/core/rest/diffUpload.js index df055f34b..22bff6443 100644 --- a/packages/server/modules/core/rest/diffUpload.js +++ b/packages/server/modules/core/rest/diffUpload.js @@ -13,7 +13,8 @@ module.exports = (app) => { app.post('/api/diff/:streamId', cors(), async (req, res) => { const boundLogger = logger.child({ userId: req.context.userId || '-', - streamId: req.params.streamId + streamId: req.params.streamId, + endpoint: '/api/diff/:streamId' }) const hasStreamAccess = await validatePermissionsWriteStream( req.params.streamId, diff --git a/packages/server/modules/core/rest/upload.js b/packages/server/modules/core/rest/upload.js index b74f9f8a1..dce44904e 100644 --- a/packages/server/modules/core/rest/upload.js +++ b/packages/server/modules/core/rest/upload.js @@ -15,8 +15,9 @@ module.exports = (app) => { app.post('/objects/:streamId', cors(), async (req, res) => { const boundLogger = uploadEndpointLogger.child({ - user: req.context.userId || '-', - streamId: req.params.streamId + userId: req.context.userId || '-', + streamId: req.params.streamId, + endpoint: '/objects/:streamId' }) const hasStreamAccess = await validatePermissionsWriteStream( diff --git a/packages/server/modules/emails/rest/index.js b/packages/server/modules/emails/rest/index.js index 6c6c6ea7d..ad7bb8078 100644 --- a/packages/server/modules/emails/rest/index.js +++ b/packages/server/modules/emails/rest/index.js @@ -1,3 +1,4 @@ +const { authLogger } = require('@/logging/logging') const { EmailVerificationFinalizationError } = require('@/modules/emails/errors') const { finalizeEmailVerification @@ -5,6 +6,7 @@ const { module.exports = (app) => { app.get('/auth/verifyemail', async (req, res) => { + const boundLogger = authLogger.child({ endpoint: '/auth/verifyemail' }) try { await finalizeEmailVerification(req.query.t) return res.redirect('/?emailverifiedstatus=true') @@ -13,7 +15,7 @@ module.exports = (app) => { error instanceof EmailVerificationFinalizationError ? error.message : 'Email verification unexpectedly failed' - + boundLogger.info({ err: error }, 'Email verification failed.') return res.redirect(`/?emailverifiederror=${msg}`) } }) diff --git a/packages/server/modules/emails/services/sending.ts b/packages/server/modules/emails/services/sending.ts index 02dc1cdbf..92b2cb295 100644 --- a/packages/server/modules/emails/services/sending.ts +++ b/packages/server/modules/emails/services/sending.ts @@ -21,7 +21,7 @@ export async function sendEmail({ }: SendEmailParams): Promise { const transporter = getTransporter() if (!transporter) { - logger.error('No email transport present. Cannot send emails.') + logger.warn('No email transport present. 
Cannot send emails.') return false } try { diff --git a/packages/server/modules/fileuploads/index.js b/packages/server/modules/fileuploads/index.js index 07442eb16..88796de6e 100644 --- a/packages/server/modules/fileuploads/index.js +++ b/packages/server/modules/fileuploads/index.js @@ -35,12 +35,18 @@ exports.init = async (app) => { '/api/file/:fileType/:streamId/:branchName?', authMiddlewareCreator(streamWritePermissions), async (req, res) => { + const boundLogger = logger.child({ + endpoint: '/api/file/:fileType/:streamId/:branchName?', + streamId: req.params.streamId, + userId: req.context.userId, + branchName: req.params.branchName ?? 'main' + }) req.pipe( request( `${process.env.CANONICAL_URL}/api/stream/${req.params.streamId}/blob`, async (err, response, body) => { if (err) { - logger.error(err) + boundLogger.error(err) res.status(500).send(err.message) return } diff --git a/packages/server/modules/pwdreset/rest/index.ts b/packages/server/modules/pwdreset/rest/index.ts index b175cdcbb..0883322fc 100644 --- a/packages/server/modules/pwdreset/rest/index.ts +++ b/packages/server/modules/pwdreset/rest/index.ts @@ -1,3 +1,4 @@ +import { logger } from '@/logging/logging' import { finalizePasswordReset } from '@/modules/pwdreset/services/finalize' import { requestPasswordRecovery } from '@/modules/pwdreset/services/request' import { ensureError } from '@/modules/shared/helpers/errorHelper' @@ -6,24 +7,28 @@ import { Express } from 'express' export default function (app: Express) { // sends a password recovery email. app.post('/auth/pwdreset/request', async (req, res) => { + const boundLogger = logger.child({ endpoint: 'auth/pwdreset/request' }) try { const email = req.body.email await requestPasswordRecovery(email) return res.status(200).send('Password reset email sent.') } catch (e: unknown) { + boundLogger.info({ err: e }, 'Error while requesting password recovery.') res.status(400).send(ensureError(e).message) } }) // Finalizes password recovery. app.post('/auth/pwdreset/finalize', async (req, res) => { + const boundLogger = logger.child({ endpoint: 'auth/pwdreset/finalize' }) try { if (!req.body.tokenId || !req.body.password) throw new Error('Invalid request.') await finalizePasswordReset(req.body.tokenId, req.body.password) return res.status(200).send('Password reset. 
Please log in.') } catch (e: unknown) { + boundLogger.info({ err: e }, 'Error while finalizing password recovery.') res.status(400).send(ensureError(e).message) } }) From f2803432b4481a7a2178ebbdac6b98a63d2f3b7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Gerg=C5=91=20Jedlicska?= <57442769+gjedlicska@users.noreply.github.com> Date: Mon, 27 Feb 2023 13:59:16 +0100 Subject: [PATCH 4/6] gergo/fix flaky admin override tests (#1420) * fix(server authz tests): force wait until env vars are reloaded in authz tests * test(server): fix flaky admin override tests with mocking --- .../server/modules/core/tests/generic.spec.js | 33 +++++++++++-------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/packages/server/modules/core/tests/generic.spec.js b/packages/server/modules/core/tests/generic.spec.js index 4584c6fdc..d63a8d8e6 100644 --- a/packages/server/modules/core/tests/generic.spec.js +++ b/packages/server/modules/core/tests/generic.spec.js @@ -1,4 +1,12 @@ /* istanbul ignore file */ +const { mockRequireModule } = require('@/test/mockHelper') +const envHelperMock = mockRequireModule( + [ + '@/modules/shared/helpers/envHelper', + require.resolve('../../shared/helpers/envHelper') + ], + ['@/modules/shared/index'] +) const expect = require('chai').expect const { beforeEachContext } = require('@/test/hooks') @@ -12,7 +20,6 @@ const { } = require('@/modules/shared') const { buildContext } = require('@/modules/shared/middleware') const { ForbiddenError } = require('apollo-server-express') -const { adminOverrideEnabled } = require('@/modules/shared/helpers/envHelper') describe('Generic AuthN & AuthZ controller tests', () => { before(async () => { @@ -133,9 +140,11 @@ describe('Generic AuthN & AuthZ controller tests', () => { }) afterEach(() => { - while (adminOverrideEnabled()) { - process.env.ADMIN_OVERRIDE_ENABLED = 'false' - } + envHelperMock.disable() + }) + after(() => { + envHelperMock.destroy() + envHelperMock.resetMockedFunctions() }) it('should allow stream:owners to be stream:owners', async () => { const role = await authorizeResolver( @@ -147,9 +156,8 @@ describe('Generic AuthN & AuthZ controller tests', () => { }) it('should get the passed in role for server:admins if override enabled', async () => { - while (!adminOverrideEnabled()) { - process.env.ADMIN_OVERRIDE_ENABLED = 'true' - } + envHelperMock.enable() + envHelperMock.mockFunction('adminOverrideEnabled', () => true) const role = await authorizeResolver( serverOwner.id, myStream.id, @@ -157,7 +165,6 @@ describe('Generic AuthN & AuthZ controller tests', () => { ) expect(role).to.equal('stream:contributor') }) - it('should not allow server:admins to be anything if adminOverride is disabled', async () => { try { await authorizeResolver(serverOwner.id, notMyStream.id, 'stream:contributor') @@ -168,9 +175,8 @@ describe('Generic AuthN & AuthZ controller tests', () => { }) it('should allow server:admins to be anything if adminOverride is enabled', async () => { - while (!adminOverrideEnabled()) { - process.env.ADMIN_OVERRIDE_ENABLED = 'true' - } + envHelperMock.enable() + envHelperMock.mockFunction('adminOverrideEnabled', () => true) const role = await authorizeResolver( serverOwner.id, @@ -190,9 +196,8 @@ describe('Generic AuthN & AuthZ controller tests', () => { }) it('should not allow server:users to be anything if adminOverride is enabled', async () => { - while (!adminOverrideEnabled()) { - process.env.ADMIN_OVERRIDE_ENABLED = 'true' - } + envHelperMock.enable() + envHelperMock.mockFunction('adminOverrideEnabled', () => true) 
try { await authorizeResolver(otherGuy.id, myStream.id, 'stream:contributor') throw 'This should have thrown' From b9d68efc5503b7e4b78c52be60f5f3a3b8bea224 Mon Sep 17 00:00:00 2001 From: Alexandru Popovici Date: Mon, 27 Feb 2023 16:20:42 +0200 Subject: [PATCH 5/6] #1408 - Separate Object Layers by Geometry Type (#1422) * Extended the ObjectLayers on a per geometry type basis. We can now restrict intersections and rendering based on geometry type (mesh, line, point) * Disabled geometry type based filtering of opaque/transparent/stencil objects in the batcher, since we're using separate object layers for the geometry types now * Fixed and issue with OR-ing layers --- packages/viewer-sandbox/src/main.ts | 6 ++-- packages/viewer/src/index.ts | 2 ++ packages/viewer/src/modules/Intersections.ts | 31 +++++++++++------ .../viewer/src/modules/SpeckleRenderer.ts | 8 +++-- .../viewer/src/modules/batching/Batcher.ts | 33 ++++++++++--------- .../viewer/src/modules/batching/LineBatch.ts | 2 ++ .../viewer/src/modules/batching/MeshBatch.ts | 2 ++ .../viewer/src/modules/batching/PointBatch.ts | 2 ++ .../src/modules/objects/SpeckleRaycaster.ts | 7 +++- .../viewer/src/modules/pipeline/Pipeline.ts | 11 ++++--- .../queries/IntersectionQuerySolver.ts | 5 +-- 11 files changed, 72 insertions(+), 37 deletions(-) diff --git a/packages/viewer-sandbox/src/main.ts b/packages/viewer-sandbox/src/main.ts index f755988da..cc399df9f 100644 --- a/packages/viewer-sandbox/src/main.ts +++ b/packages/viewer-sandbox/src/main.ts @@ -178,7 +178,7 @@ await sandbox.loadUrl( //Gergo's house // 'https://latest.speckle.dev/streams/c1faab5c62/commits/78bdd8eb76' // Point cloud - // 'https://latest.speckle.dev/streams/2d19273d31/commits/9ceb423feb' + 'https://latest.speckle.dev/streams/2d19273d31/commits/9ceb423feb' // Luis sphere // 'https://speckle.xyz/streams/b85d53c3b4/commits/b47f21b707' // Crankshaft @@ -238,6 +238,8 @@ await sandbox.loadUrl( // tekla 2 // 'https://speckle.xyz/streams/be4813ccd2/commits/da85000921?c=%5B-1.12295,-2.60901,6.12402,4.77979,0.555,3.63346,0,1%5D' // 'https://latest.speckle.dev/streams/85bc4f61c6/commits/bb7b718a1a' - 'https://latest.speckle.dev/streams/c1faab5c62/commits/6b1b1195c4' + // 'https://latest.speckle.dev/streams/c1faab5c62/commits/6b1b1195c4' // 'https://latest.speckle.dev/streams/c1faab5c62/commits/cef1e7527b' + // Big curves + // 'https://latest.speckle.dev/streams/c1faab5c62/commits/49dad07ae2' ) diff --git a/packages/viewer/src/index.ts b/packages/viewer/src/index.ts index c10615c5c..1805a7720 100644 --- a/packages/viewer/src/index.ts +++ b/packages/viewer/src/index.ts @@ -28,6 +28,7 @@ import { IntersectionQueryResult } from './modules/queries/Query' import { Utils } from './modules/Utils' +import { ObjectLayers } from './modules/SpeckleRenderer' export { Viewer, @@ -52,6 +53,7 @@ export type { SpeckleView, CanonicalView, InlineView, + ObjectLayers, PointQuery, IntersectionQuery, QueryResult, diff --git a/packages/viewer/src/modules/Intersections.ts b/packages/viewer/src/modules/Intersections.ts index 35e6dec4f..db8591ecf 100644 --- a/packages/viewer/src/modules/Intersections.ts +++ b/packages/viewer/src/modules/Intersections.ts @@ -3,7 +3,6 @@ import { Camera, Intersection, Object3D, - Points, Ray, Scene, Vector2, @@ -13,10 +12,10 @@ import { LineMaterial } from 'three/examples/jsm/lines/LineMaterial' import { LineSegments2 } from 'three/examples/jsm/lines/LineSegments2' import { SpeckleRaycaster } from './objects/SpeckleRaycaster' import Logger from 'js-logger' +import { ObjectLayers } 
from './SpeckleRenderer' export class Intersections { private raycaster: SpeckleRaycaster - private allowPointPick = false private boxBuffer: Box3 = new Box3() private vec0Buffer: Vector4 = new Vector4() private vec1Buffer: Vector4 = new Vector4() @@ -81,11 +80,12 @@ export class Intersections { point: Vector2, nearest = true, bounds: Box3 = null, + castLayers: Array = undefined, firstOnly = false ): Array { this.raycaster.setFromCamera(point, camera) this.raycaster.firstHitOnly = firstOnly - return this.intersectInternal(scene, nearest, bounds) + return this.intersectInternal(scene, nearest, bounds, castLayers) } public intersectRay( @@ -94,15 +94,29 @@ export class Intersections { ray: Ray, nearest = true, bounds: Box3 = null, + castLayers: Array = undefined, firstOnly = false ): Array { this.raycaster.camera = camera this.raycaster.set(ray.origin, ray.direction) this.raycaster.firstHitOnly = firstOnly - return this.intersectInternal(scene, nearest, bounds) + return this.intersectInternal(scene, nearest, bounds, castLayers) } - private intersectInternal(scene: Scene, nearest: boolean, bounds: Box3) { + private intersectInternal( + scene: Scene, + nearest: boolean, + bounds: Box3, + castLayers: Array + ) { + const preserveMask = this.raycaster.layers.mask + + if (castLayers !== undefined) { + this.raycaster.layers.disableAll() + castLayers.forEach((layer) => { + this.raycaster.layers.enable(layer) + }) + } const target = scene.getObjectByName('ContentGroup') let results = [] @@ -111,6 +125,7 @@ export class Intersections { results = this.raycaster.intersectObjects(target.children) Logger.warn('Interesct time -> ', performance.now() - start) } + this.raycaster.layers.mask = preserveMask if (results.length === 0) return null if (nearest) @@ -122,11 +137,7 @@ export class Intersections { return bounds.containsPoint(result.point) }) } - if (!this.allowPointPick) { - results = results.filter((val) => { - return !(val.object instanceof Points) - }) - } + return results } } diff --git a/packages/viewer/src/modules/SpeckleRenderer.ts b/packages/viewer/src/modules/SpeckleRenderer.ts index 1ba49b01c..ba8fcf300 100644 --- a/packages/viewer/src/modules/SpeckleRenderer.ts +++ b/packages/viewer/src/modules/SpeckleRenderer.ts @@ -57,6 +57,10 @@ import { Shadowcatcher } from './Shadowcatcher' import Logger from 'js-logger' export enum ObjectLayers { + STREAM_CONTENT_MESH = 10, + STREAM_CONTENT_LINE = 11, + STREAM_CONTENT_POINT = 12, + STREAM_CONTENT = 1, PROPS = 2, SHADOWCATCHER = 3 @@ -296,7 +300,8 @@ export default class SpeckleRenderer { }) this._shadowcatcher = new Shadowcatcher(ObjectLayers.SHADOWCATCHER, [ - ObjectLayers.STREAM_CONTENT + ObjectLayers.STREAM_CONTENT_MESH, + ObjectLayers.STREAM_CONTENT_LINE ]) let restoreVisibility this._shadowcatcher.shadowcatcherPass.onBeforeRender = () => { @@ -499,7 +504,6 @@ export default class SpeckleRenderer { private addBatch(batch: Batch, parent: Object3D) { const batchRenderable = batch.renderObject - batchRenderable.layers.set(ObjectLayers.STREAM_CONTENT) parent.add(batch.renderObject) if (batch.geometryType === GeometryType.MESH) { diff --git a/packages/viewer/src/modules/batching/Batcher.ts b/packages/viewer/src/modules/batching/Batcher.ts index 0b2c5b87a..e2e2e524b 100644 --- a/packages/viewer/src/modules/batching/Batcher.ts +++ b/packages/viewer/src/modules/batching/Batcher.ts @@ -171,10 +171,10 @@ export default class Batcher { const visibilityRanges = {} for (const k in this.batches) { const batch: Batch = this.batches[k] - if (batch.geometryType 
!== GeometryType.MESH) { - visibilityRanges[k] = HideAllBatchUpdateRange - continue - } + // if (batch.geometryType !== GeometryType.MESH) { + // visibilityRanges[k] = HideAllBatchUpdateRange + // continue + // } const batchMesh: Mesh = batch.renderObject as Mesh if (batchMesh.geometry.groups.length === 0) { if ((batchMesh.material as Material).transparent === true) @@ -204,10 +204,10 @@ export default class Batcher { const visibilityRanges = {} for (const k in this.batches) { const batch: Batch = this.batches[k] - if (batch.geometryType !== GeometryType.MESH) { - visibilityRanges[k] = HideAllBatchUpdateRange - continue - } + // if (batch.geometryType !== GeometryType.MESH) { + // visibilityRanges[k] = HideAllBatchUpdateRange + // continue + // } const batchMesh: Mesh = batch.renderObject as Mesh if (batchMesh.geometry.groups.length === 0) { if ((batchMesh.material as Material).stencilWrite === true) @@ -231,10 +231,10 @@ export default class Batcher { const visibilityRanges = {} for (const k in this.batches) { const batch: Batch = this.batches[k] - if (batch.geometryType !== GeometryType.MESH) { - visibilityRanges[k] = HideAllBatchUpdateRange - continue - } + // if (batch.geometryType !== GeometryType.MESH) { + // visibilityRanges[k] = HideAllBatchUpdateRange + // continue + // } const batchMesh: Mesh = batch.renderObject as Mesh if (batchMesh.geometry.groups.length === 0) { if ((batchMesh.material as Material).transparent === false) @@ -325,10 +325,8 @@ export default class Batcher { }) } - /** Conveniece method. This should also work as a filtering action - * Though, because the batches are not smart enough yet to group - * their draw ranges, it would be currently be inneficient to isolate - * via filtering. This will change in the future + /** + * Used for debuggin only */ public isolateRenderView(id: string) { const rvs = WorldTree.getRenderTree().getRenderViewsForNodeId(id) @@ -375,6 +373,9 @@ export default class Batcher { } } + /** + * Used for debuggin only + */ public async isolateRenderViewBatch(id: string) { const rv = WorldTree.getRenderTree().getRenderViewForNodeId(id) for (const k in this.batches) { diff --git a/packages/viewer/src/modules/batching/LineBatch.ts b/packages/viewer/src/modules/batching/LineBatch.ts index cfcd57692..6f45b8f43 100644 --- a/packages/viewer/src/modules/batching/LineBatch.ts +++ b/packages/viewer/src/modules/batching/LineBatch.ts @@ -13,6 +13,7 @@ import { LineSegments2 } from 'three/examples/jsm/lines/LineSegments2' import { LineSegmentsGeometry } from 'three/examples/jsm/lines/LineSegmentsGeometry' import { Geometry } from '../converter/Geometry' import SpeckleLineMaterial from '../materials/SpeckleLineMaterial' +import { ObjectLayers } from '../SpeckleRenderer' import { NodeRenderView } from '../tree/NodeRenderView' import { Viewer } from '../Viewer' import { @@ -198,6 +199,7 @@ export default class LineBatch implements Batch { this.mesh.scale.set(1, 1, 1) this.mesh.uuid = this.id + this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_LINE) } public getRenderView(index: number): NodeRenderView { diff --git a/packages/viewer/src/modules/batching/MeshBatch.ts b/packages/viewer/src/modules/batching/MeshBatch.ts index 12a197de8..391322b0e 100644 --- a/packages/viewer/src/modules/batching/MeshBatch.ts +++ b/packages/viewer/src/modules/batching/MeshBatch.ts @@ -24,6 +24,7 @@ import { HideAllBatchUpdateRange } from './Batch' import Logger from 'js-logger' +import { ObjectLayers } from '../SpeckleRenderer' export default class MeshBatch implements Batch { 
public id: string @@ -451,6 +452,7 @@ export default class MeshBatch implements Batch { this.boundsTree.getBoundingBox(this.bounds) this.mesh = new SpeckleMesh(this.geometry, this.batchMaterial, this.boundsTree) this.mesh.uuid = this.id + this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_MESH) } public getRenderView(index: number): NodeRenderView { diff --git a/packages/viewer/src/modules/batching/PointBatch.ts b/packages/viewer/src/modules/batching/PointBatch.ts index 7afef5f81..e14ef9161 100644 --- a/packages/viewer/src/modules/batching/PointBatch.ts +++ b/packages/viewer/src/modules/batching/PointBatch.ts @@ -17,6 +17,7 @@ import { HideAllBatchUpdateRange } from './Batch' import Logger from 'js-logger' +import { ObjectLayers } from '../SpeckleRenderer' export default class PointBatch implements Batch { public id: string @@ -285,6 +286,7 @@ export default class PointBatch implements Batch { this.makePointGeometry(position, color) this.mesh = new Points(this.geometry, this.batchMaterial) this.mesh.uuid = this.id + this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_POINT) } public getRenderView(index: number): NodeRenderView { diff --git a/packages/viewer/src/modules/objects/SpeckleRaycaster.ts b/packages/viewer/src/modules/objects/SpeckleRaycaster.ts index a7907b59f..c52b64ddb 100644 --- a/packages/viewer/src/modules/objects/SpeckleRaycaster.ts +++ b/packages/viewer/src/modules/objects/SpeckleRaycaster.ts @@ -6,7 +6,12 @@ export class SpeckleRaycaster extends Raycaster { constructor(origin?, direction?, near = 0, far = Infinity) { super(origin, direction, near, far) - this.layers.set(ObjectLayers.STREAM_CONTENT) + this.layers.disableAll() + this.layers.enable(ObjectLayers.STREAM_CONTENT) + this.layers.enable(ObjectLayers.STREAM_CONTENT_MESH) + this.layers.enable(ObjectLayers.STREAM_CONTENT_LINE) + // OFF by default + // this.layers.enable(ObjectLayers.STREAM_CONTENT_POINT) } public intersectObjects(objects, recursive = true, intersects = []) { diff --git a/packages/viewer/src/modules/pipeline/Pipeline.ts b/packages/viewer/src/modules/pipeline/Pipeline.ts index dded068b3..d74fc8e89 100644 --- a/packages/viewer/src/modules/pipeline/Pipeline.ts +++ b/packages/viewer/src/modules/pipeline/Pipeline.ts @@ -237,15 +237,18 @@ export class Pipeline { this.copyOutputPass = new CopyOutputPass() this.copyOutputPass.renderToScreen = true - this.depthPass.setLayers([ObjectLayers.STREAM_CONTENT]) - this.normalsPass.setLayers([ObjectLayers.STREAM_CONTENT]) - this.stencilPass.setLayers([ObjectLayers.STREAM_CONTENT]) + this.depthPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH]) + this.normalsPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH]) + this.stencilPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH]) this.renderPass.setLayers([ ObjectLayers.PROPS, ObjectLayers.STREAM_CONTENT, + ObjectLayers.STREAM_CONTENT_MESH, + ObjectLayers.STREAM_CONTENT_LINE, + ObjectLayers.STREAM_CONTENT_POINT, ObjectLayers.SHADOWCATCHER ]) - this.stencilMaskPass.setLayers([ObjectLayers.STREAM_CONTENT]) + this.stencilMaskPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH]) let restoreVisibility this.depthPass.onBeforeRender = () => { diff --git a/packages/viewer/src/modules/queries/IntersectionQuerySolver.ts b/packages/viewer/src/modules/queries/IntersectionQuerySolver.ts index 59c6d5141..b2f74932a 100644 --- a/packages/viewer/src/modules/queries/IntersectionQuerySolver.ts +++ b/packages/viewer/src/modules/queries/IntersectionQuerySolver.ts @@ -1,6 +1,6 @@ import Logger from 'js-logger' import { Intersection, Ray, Vector2, Vector3 } 
from 'three' -import SpeckleRenderer from '../SpeckleRenderer' +import SpeckleRenderer, { ObjectLayers } from '../SpeckleRenderer' import { IntersectionQuery, IntersectionQueryResult } from './Query' export class IntersectionQuerySolver { @@ -32,7 +32,8 @@ export class IntersectionQuerySolver { this.renderer.camera, ray, true, - this.renderer.currentSectionBox + this.renderer.currentSectionBox, + [ObjectLayers.STREAM_CONTENT_MESH] ) if (!results || results.length === 0) return { objects: null } const hits = this.renderer.queryHits(results) From 7e899503581e03cc893d121fd629fe6c2dfa0103 Mon Sep 17 00:00:00 2001 From: Iain Sproat <68657+iainsproat@users.noreply.github.com> Date: Mon, 27 Feb 2023 15:14:56 +0000 Subject: [PATCH 6/6] feat(logging): log all http requests and responses (#1416) - feat(logging): log all http requests and responses - the auto logger does not log the body, to ensure sensitive payloads are not logged. Unfortunately this means that error messages are not logged either, so need to be manually logged. - fix(logging): 400 errors should be info not error severity logging --- packages/server/app.ts | 12 +-- packages/server/logging/expressLogging.ts | 81 ++++++++++++++++++- packages/server/logging/logging.ts | 1 - packages/server/modules/auth/rest/index.js | 15 +--- .../modules/auth/strategies/azure-ad.js | 11 +-- .../server/modules/auth/strategies/local.js | 17 ++-- packages/server/modules/blobstorage/index.js | 11 ++- .../server/modules/core/rest/diffDownload.js | 12 ++- .../server/modules/core/rest/diffUpload.js | 10 +-- packages/server/modules/core/rest/upload.js | 30 ++++--- packages/server/modules/emails/rest/index.js | 4 +- packages/server/modules/fileuploads/index.js | 7 +- .../server/modules/pwdreset/rest/index.ts | 7 +- 13 files changed, 137 insertions(+), 81 deletions(-) diff --git a/packages/server/app.ts b/packages/server/app.ts index 430e49954..91dac08eb 100644 --- a/packages/server/app.ts +++ b/packages/server/app.ts @@ -11,7 +11,10 @@ import { createTerminus } from '@godaddy/terminus' import * as Sentry from '@sentry/node' import Logging from '@/logging' import { startupLogger, shutdownLogger } from '@/logging/logging' -import { LoggingExpressMiddleware } from '@/logging/expressLogging' +import { + DetermineRequestIdMiddleware, + LoggingExpressMiddleware +} from '@/logging/expressLogging' import { errorLoggingMiddleware } from '@/logging/errorLogging' import prometheusClient from 'prom-client' @@ -183,9 +186,8 @@ export async function init() { // Should perhaps be done manually? await knex.migrate.latest() - if (process.env.NODE_ENV !== 'test') { - app.use(LoggingExpressMiddleware) - } + app.use(DetermineRequestIdMiddleware) + app.use(LoggingExpressMiddleware) if (process.env.COMPRESSION) { app.use(compression()) @@ -217,12 +219,10 @@ export async function init() { // Expose prometheus metrics app.get('/metrics', async (req, res) => { - const boundLogger = startupLogger.child({ endpoint: '/metrics' }) try { res.set('Content-Type', prometheusClient.register.contentType) res.end(await prometheusClient.register.metrics()) } catch (ex: unknown) { - boundLogger.error(ex, 'Error while retrieving metrics.') res.status(500).end(ex instanceof Error ? 
ex.message : `${ex}`) } }) diff --git a/packages/server/logging/expressLogging.ts b/packages/server/logging/expressLogging.ts index 2f9ae6ba6..219e02b5e 100644 --- a/packages/server/logging/expressLogging.ts +++ b/packages/server/logging/expressLogging.ts @@ -1,7 +1,86 @@ import { logger } from './logging' +import { randomUUID } from 'crypto' import HttpLogger from 'pino-http' +import { IncomingMessage } from 'http' +import { NextFunction, Response } from 'express' +import pino, { SerializedResponse } from 'pino' +import { GenReqId } from 'pino-http' + +const REQUEST_ID_HEADER = 'x-request-id' + +const GenerateRequestId: GenReqId = (req: IncomingMessage) => DetermineRequestId(req) + +const DetermineRequestId = ( + req: IncomingMessage, + uuidGenerator: () => string = randomUUID +): string => { + const headers = req.headers[REQUEST_ID_HEADER] + if (!Array.isArray(headers)) return headers || uuidGenerator() + return headers[0] || uuidGenerator() +} export const LoggingExpressMiddleware = HttpLogger({ logger, - autoLogging: false + autoLogging: true, + genReqId: GenerateRequestId, + customLogLevel: (req, res, err) => { + if (res.statusCode >= 400 && res.statusCode < 500) { + return 'info' + } else if (res.statusCode >= 500 || err) { + return 'error' + } else if (res.statusCode >= 300 && res.statusCode < 400) { + return 'silent' + } + return 'info' + }, + + // we need to redact any potential sensitive data from being logged. + // as we do not know what headers may be sent in a request by a user or client + // we have to allow list selected headers + serializers: { + req: pino.stdSerializers.wrapRequestSerializer((req) => { + return { + id: req.raw.id, + method: req.raw.method, + path: req.raw.url?.split('?')[0], // Remove query params which might be sensitive + // Allowlist useful headers + headers: { + host: req.raw.headers.host, + 'user-agent': req.raw.headers['user-agent'], + 'x-request-id': req.raw.headers[REQUEST_ID_HEADER], + referer: req.raw.headers.referer + } + } + }), + res: pino.stdSerializers.wrapResponseSerializer((res) => { + const resRaw = res as SerializedResponse & { + raw: { + headers: Record + } + } + return { + statusCode: res.raw.statusCode, + // Allowlist useful headers + headers: { + 'content-length': resRaw.raw.headers['content-length'], + 'content-type': resRaw.raw.headers['content-type'], + 'retry-after': resRaw.raw.headers['retry-after'], + 'x-ratelimit-remaining': resRaw.raw.headers['x-ratelimit-remaining'], + 'x-ratelimit-reset': resRaw.raw.headers['x-ratelimit-reset'], + 'x-request-id': resRaw.raw.headers['x-request-id'], + 'x-speckle-meditation': resRaw.raw.headers['x-speckle-meditation'] + } + } + }) + } }) + +export const DetermineRequestIdMiddleware = ( + req: IncomingMessage, + res: Response, + next: NextFunction +) => { + const id = DetermineRequestId(req) + res.setHeader(REQUEST_ID_HEADER, id) + next() +} diff --git a/packages/server/logging/logging.ts b/packages/server/logging/logging.ts index cf0f3058f..bec9a439f 100644 --- a/packages/server/logging/logging.ts +++ b/packages/server/logging/logging.ts @@ -23,5 +23,4 @@ export const uploadEndpointLogger = extendLoggerComponent(logger, 'upload-endpoi export const dbLogger = extendLoggerComponent(logger, 'db') export const servicesLogger = extendLoggerComponent(logger, 'services') export const rateLimiterLogger = extendLoggerComponent(logger, 'rate-limiter') -export const authLogger = extendLoggerComponent(logger, 'auth') export const redisLogger = extendLoggerComponent(logger, 'redis') diff --git 
a/packages/server/modules/auth/rest/index.js b/packages/server/modules/auth/rest/index.js index 0cbc14528..a10b3cc4d 100644 --- a/packages/server/modules/auth/rest/index.js +++ b/packages/server/modules/auth/rest/index.js @@ -14,7 +14,6 @@ const { revokeRefreshToken } = require(`@/modules/auth/services/apps`) const { validateScopes } = require(`@/modules/shared`) const { InvalidAccessCodeRequestError } = require('@/modules/auth/errors') const { ForbiddenError } = require('apollo-server-errors') -const { authLogger } = require('@/logging/logging') // TODO: Secure these endpoints! module.exports = (app) => { @@ -23,7 +22,6 @@ module.exports = (app) => { TODO: ensure same origin. */ app.get('/auth/accesscode', async (req, res) => { - const boundLogger = authLogger.child({ endpoint: '/auth/accesscode' }) try { const appId = req.query.appId const app = await getApp({ id: appId }) @@ -49,14 +47,11 @@ module.exports = (app) => { err instanceof InvalidAccessCodeRequestError || err instanceof ForbiddenError ) { - boundLogger.info( - { err }, - 'Invalid access code request error, or Forbidden error.' - ) + res.log.info({ err }, 'Invalid access code request error, or Forbidden error.') return res.status(400).send(err.message) } else { sentry({ err }) - boundLogger.error(err) + res.log.error(err) return res .status(500) .send('Something went wrong while processing your request') @@ -69,7 +64,6 @@ module.exports = (app) => { */ app.options('/auth/token', cors()) app.post('/auth/token', cors(), async (req, res) => { - const boundLogger = authLogger.child({ endpoint: '/auth/token' }) try { // Token refresh if (req.body.refreshToken) { @@ -104,7 +98,7 @@ module.exports = (app) => { return res.send(authResponse) } catch (err) { sentry({ err }) - boundLogger.warn(err) + res.log.info({ err }, 'Error while trying to generate a new token.') return res.status(401).send({ err: err.message }) } }) @@ -113,7 +107,6 @@ module.exports = (app) => { Ensures a user is logged out by invalidating their token and refresh token. */ app.post('/auth/logout', async (req, res) => { - const boundLogger = authLogger.child({ endpoint: '/auth/logout' }) try { const token = req.body.token const refreshToken = req.body.refreshToken @@ -126,7 +119,7 @@ module.exports = (app) => { return res.status(200).send({ message: 'You have logged out.' 
}) } catch (err) { sentry({ err }) - boundLogger.error(err) + res.log.info({ err }, 'Error while trying to logout.') return res.status(400).send('Something went wrong while trying to logout.') } }) diff --git a/packages/server/modules/auth/strategies/azure-ad.js b/packages/server/modules/auth/strategies/azure-ad.js index 9eeeffadf..3d03d316d 100644 --- a/packages/server/modules/auth/strategies/azure-ad.js +++ b/packages/server/modules/auth/strategies/azure-ad.js @@ -12,7 +12,6 @@ const { resolveAuthRedirectPath } = require('@/modules/serverinvites/services/inviteProcessingService') const { passportAuthenticate } = require('@/modules/auth/services/passportService') -const { authLogger } = require('@/logging/logging') const { UserInputError } = require('@/modules/core/errors/userinput') module.exports = async (app, session, sessionStorage, finalizeAuth) => { @@ -51,7 +50,6 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { session, passportAuthenticate('azuread-openidconnect'), async (req, res, next) => { - let boundLogger = authLogger.child({ endpoint: '/auth/azure/callback' }) const serverInfo = await getServerInfo() try { @@ -112,7 +110,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { // ID is used later for verifying access token req.user.id = myUser.id - boundLogger = boundLogger.child({ userId: myUser.id }) + req.log = req.log.child({ userId: myUser.id }) // use the invite await finalizeInvitedServerRegistration(user.email, myUser.id) @@ -125,10 +123,13 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => { } catch (err) { switch (err.constructor) { case UserInputError: - boundLogger.info(err) + req.log.info( + { err }, + 'User input error during Azure AD authentication callback.' + ) break default: - boundLogger.error(err) + req.log.error(err, 'Error during Azure AD authentication callback.') } return next() } diff --git a/packages/server/modules/auth/strategies/local.js b/packages/server/modules/auth/strategies/local.js index 93849aee3..99926abcd 100644 --- a/packages/server/modules/auth/strategies/local.js +++ b/packages/server/modules/auth/strategies/local.js @@ -17,7 +17,6 @@ const { resolveAuthRedirectPath } = require('@/modules/serverinvites/services/inviteProcessingService') const { getIpFromRequest } = require('@/modules/shared/utils/ip') -const { authLogger } = require('@/logging/logging') const { NoInviteFoundError } = require('@/modules/serverinvites/errors') const { UserInputError, @@ -38,23 +37,22 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { session, sessionAppId, async (req, res, next) => { - const boundLogger = authLogger.child({ endpoint: '/auth/local/login' }) try { const valid = await validatePasssword({ email: req.body.email, password: req.body.password }) - if (!valid) throw new UserInputError('Invalid credentials') + if (!valid) throw new UserInputError('Invalid credentials.') const user = await getUserByEmail({ email: req.body.email }) - if (!user) throw new UserInputError('Invalid credentials') + if (!user) throw new UserInputError('Invalid credentials.') req.user = { id: user.id } return next() } catch (err) { - boundLogger.info({ err }, 'Error while logging in.') - return res.status(401).send({ err: true, message: 'Invalid credentials' }) + res.log.info({ err }, 'Error while logging in.') + return res.status(401).send({ err: true, message: 'Invalid credentials.' 
}) } }, finalizeAuth @@ -65,7 +63,6 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { session, sessionAppId, async (req, res, next) => { - let boundLogger = authLogger.child({ endpoint: '/auth/local/register' }) const serverInfo = await getServerInfo() try { if (!req.body.password) throw new UserInputError('Password missing') @@ -102,7 +99,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { // so we go ahead and register the user const userId = await createUser(user) req.user = { id: userId, email: user.email } - boundLogger = boundLogger.child({ userId }) + req.log = req.log.child({ userId }) // 4. use up all server-only invites the email had attached to it await finalizeInvitedServerRegistration(user.email, userId) @@ -116,10 +113,10 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => { case PasswordTooShortError: case UserInputError: case NoInviteFoundError: - boundLogger.info({ err }, 'Error while registering user.') + res.log.info({ err }, 'Error while registering.') return res.status(400).send({ err: err.message }) default: - boundLogger.error(err, 'Error while registering user.') + res.log.error(err, 'Error while registering.') return res.status(500).send({ err: err.message }) } } diff --git a/packages/server/modules/blobstorage/index.js b/packages/server/modules/blobstorage/index.js index da82dc910..4f6bb41c0 100644 --- a/packages/server/modules/blobstorage/index.js +++ b/packages/server/modules/blobstorage/index.js @@ -80,7 +80,6 @@ exports.init = async (app) => { allowForAllRegisteredUsersOnPublicStreamsWithPublicComments ]), async (req, res) => { - let boundLogger = logger.child({ endpoint: '/api/stream/:streamId/blob' }) // no checking of startup conditions, just dont init the endpoints if not configured right //authorize request const uploadOperations = {} @@ -90,12 +89,12 @@ exports.init = async (app) => { limits: { fileSize: getFileSizeLimit() } }) const streamId = req.params.streamId - boundLogger = boundLogger.child({ streamId, userId: req.context.userId }) + req.log = req.log.child({ streamId, userId: req.context.userId }) busboy.on('file', (formKey, file, info) => { const { filename: fileName } = info const fileType = fileName.split('.').pop().toLowerCase() - boundLogger = boundLogger.child({ fileName, fileType }) + req.log = req.log.child({ fileName, fileType }) const registerUploadResult = (processingPromise) => { finalizePromises.push( processingPromise.then((resultItem) => ({ ...resultItem, formKey })) @@ -112,7 +111,7 @@ exports.init = async (app) => { } } - boundLogger = boundLogger.child({ blobId }) + req.log = req.log.child({ blobId }) uploadOperations[blobId] = uploadFileStream( storeFileStream, @@ -153,7 +152,7 @@ exports.init = async (app) => { }) busboy.on('error', async (err) => { - boundLogger.info({ err }, 'Upload request error.') + res.log.info({ err }, 'Upload request error.') //delete all started uploads await Promise.all( Object.keys(uploadOperations).map((blobId) => @@ -162,7 +161,7 @@ exports.init = async (app) => { ) const status = 400 - const response = 'Upload request error. The server logs may have more details' + const response = 'Upload request error. The server logs may have more details.' 
res.status(status).end(response) }) diff --git a/packages/server/modules/core/rest/diffDownload.js b/packages/server/modules/core/rest/diffDownload.js index dde36807d..fdd1d67e0 100644 --- a/packages/server/modules/core/rest/diffDownload.js +++ b/packages/server/modules/core/rest/diffDownload.js @@ -7,16 +7,14 @@ const { SpeckleObjectsStream } = require('./speckleObjectsStream') const { getObjectsStream } = require('../services/objects') const { pipeline, PassThrough } = require('stream') -const { logger } = require('@/logging/logging') module.exports = (app) => { app.options('/api/getobjects/:streamId', cors()) app.post('/api/getobjects/:streamId', cors(), async (req, res) => { - const boundLogger = logger.child({ + req.log = req.log.child({ userId: req.context.userId || '-', - streamId: req.params.streamId, - endpoint: '/api/getobjects/:streamId' + streamId: req.params.streamId }) const hasStreamAccess = await validatePermissionsReadStream( req.params.streamId, @@ -45,9 +43,9 @@ module.exports = (app) => { res, (err) => { if (err) { - boundLogger.error(err, `App error streaming objects`) + req.log.error(err, `App error streaming objects`) } else { - boundLogger.info( + req.log.info( `Streamed ${childrenList.length} objects (size: ${ gzipStream.bytesWritten / 1000000 } MB)` @@ -72,7 +70,7 @@ module.exports = (app) => { }) } } catch (ex) { - boundLogger.error(ex, `DB Error streaming objects`) + req.log.error(ex, `DB Error streaming objects`) speckleObjStream.emit('error', new Error('Database streaming error')) } speckleObjStream.end() diff --git a/packages/server/modules/core/rest/diffUpload.js b/packages/server/modules/core/rest/diffUpload.js index 22bff6443..1bf1421ac 100644 --- a/packages/server/modules/core/rest/diffUpload.js +++ b/packages/server/modules/core/rest/diffUpload.js @@ -5,16 +5,14 @@ const cors = require('cors') const { validatePermissionsWriteStream } = require('./authUtils') const { hasObjects } = require('../services/objects') -const { logger } = require('@/logging/logging') module.exports = (app) => { app.options('/api/diff/:streamId', cors()) app.post('/api/diff/:streamId', cors(), async (req, res) => { - const boundLogger = logger.child({ + req.log = req.log.child({ userId: req.context.userId || '-', - streamId: req.params.streamId, - endpoint: '/api/diff/:streamId' + streamId: req.params.streamId }) const hasStreamAccess = await validatePermissionsWriteStream( req.params.streamId, @@ -26,13 +24,13 @@ module.exports = (app) => { const objectList = JSON.parse(req.body.objects) - boundLogger.info(`Diffing ${objectList.length} objects.`) + req.log.info(`Diffing ${objectList.length} objects.`) const response = await hasObjects({ streamId: req.params.streamId, objectIds: objectList }) - boundLogger.debug(response) + req.log.debug(response) res.writeHead(200, { 'Content-Encoding': 'gzip', 'Content-Type': 'application/json' diff --git a/packages/server/modules/core/rest/upload.js b/packages/server/modules/core/rest/upload.js index dce44904e..fd658b488 100644 --- a/packages/server/modules/core/rest/upload.js +++ b/packages/server/modules/core/rest/upload.js @@ -6,7 +6,6 @@ const Busboy = require('busboy') const { validatePermissionsWriteStream } = require('./authUtils') const { createObjectsBatched } = require('../services/objects') -const { uploadEndpointLogger } = require('@/logging/logging') const MAX_FILE_SIZE = 50 * 1024 * 1024 @@ -14,10 +13,9 @@ module.exports = (app) => { app.options('/objects/:streamId', cors()) app.post('/objects/:streamId', cors(), async (req, res) 
=> { - const boundLogger = uploadEndpointLogger.child({ + req.log = req.log.child({ userId: req.context.userId || '-', - streamId: req.params.streamId, - endpoint: '/objects/:streamId' + streamId: req.params.streamId }) const hasStreamAccess = await validatePermissionsWriteStream( @@ -54,7 +52,7 @@ module.exports = (app) => { const gzippedBuffer = Buffer.concat(buffer) if (gzippedBuffer.length > MAX_FILE_SIZE) { - boundLogger.error( + req.log.error( `Upload error: Batch size too large (${gzippedBuffer.length} > ${MAX_FILE_SIZE})` ) if (!requestDropped) @@ -68,7 +66,7 @@ module.exports = (app) => { const gunzippedBuffer = zlib.gunzipSync(gzippedBuffer).toString() if (gunzippedBuffer.length > MAX_FILE_SIZE) { - boundLogger.error( + req.log.error( `Upload error: Batch size too large (${gunzippedBuffer.length} > ${MAX_FILE_SIZE})` ) if (!requestDropped) @@ -83,7 +81,7 @@ module.exports = (app) => { try { objs = JSON.parse(gunzippedBuffer) } catch (e) { - boundLogger.error(`Upload error: Batch not in JSON format`) + req.log.error(`Upload error: Batch not in JSON format`) if (!requestDropped) res.status(400).send('Failed to parse data.') requestDropped = true } @@ -98,7 +96,7 @@ module.exports = (app) => { } const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => { - boundLogger.error(e, `Upload error.`) + req.log.error(e, `Upload error.`) if (!requestDropped) res .status(400) @@ -111,7 +109,7 @@ module.exports = (app) => { await promise - boundLogger.info( + req.log.info( { durationSeconds: (Date.now() - t0) / 1000, crtMemUsageMB: process.memoryUsage().heapUsed / 1024 / 1024, @@ -138,7 +136,7 @@ module.exports = (app) => { let objs = [] if (buffer.length > MAX_FILE_SIZE) { - boundLogger.error( + req.log.error( `Upload error: Batch size too large (${buffer.length} > ${MAX_FILE_SIZE})` ) if (!requestDropped) @@ -151,7 +149,7 @@ module.exports = (app) => { try { objs = JSON.parse(buffer) } catch (e) { - boundLogger.error(`Upload error: Batch not in JSON format`) + req.log.error(`Upload error: Batch not in JSON format`) if (!requestDropped) res.status(400).send('Failed to parse data.') requestDropped = true } @@ -165,7 +163,7 @@ module.exports = (app) => { } const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => { - boundLogger.error(e, `Upload error.`) + req.log.error(e, `Upload error.`) if (!requestDropped) res .status(400) @@ -177,7 +175,7 @@ module.exports = (app) => { promises.push(promise) await promise - boundLogger.info( + req.log.info( { uploadedSizeMB: buffer.length / 1000000, durationSeconds: (Date.now() - t0) / 1000, @@ -188,7 +186,7 @@ module.exports = (app) => { ) }) } else { - boundLogger.error(`Invalid ContentType header: ${mimeType}`) + req.log.info(`Invalid ContentType header: ${mimeType}`) if (!requestDropped) res .status(400) @@ -202,7 +200,7 @@ module.exports = (app) => { busboy.on('finish', async () => { if (requestDropped) return - boundLogger.info( + req.log.info( { crtMemUsageMB: process.memoryUsage().heapUsed / 1024 / 1024 }, @@ -219,7 +217,7 @@ module.exports = (app) => { }) busboy.on('error', async (err) => { - boundLogger.info(`Upload error: ${err}`) + req.log.info(`Upload error: ${err}`) if (!requestDropped) res.status(400).end('Upload request error. 
The server logs have more details') requestDropped = true diff --git a/packages/server/modules/emails/rest/index.js b/packages/server/modules/emails/rest/index.js index ad7bb8078..2e3953866 100644 --- a/packages/server/modules/emails/rest/index.js +++ b/packages/server/modules/emails/rest/index.js @@ -1,4 +1,3 @@ -const { authLogger } = require('@/logging/logging') const { EmailVerificationFinalizationError } = require('@/modules/emails/errors') const { finalizeEmailVerification @@ -6,7 +5,6 @@ const { module.exports = (app) => { app.get('/auth/verifyemail', async (req, res) => { - const boundLogger = authLogger.child({ endpoint: '/auth/verifyemail' }) try { await finalizeEmailVerification(req.query.t) return res.redirect('/?emailverifiedstatus=true') @@ -15,7 +13,7 @@ module.exports = (app) => { error instanceof EmailVerificationFinalizationError ? error.message : 'Email verification unexpectedly failed' - boundLogger.info({ err: error }, 'Email verification failed.') + req.log.info({ err: error }, 'Email verification failed.') return res.redirect(`/?emailverifiederror=${msg}`) } }) diff --git a/packages/server/modules/fileuploads/index.js b/packages/server/modules/fileuploads/index.js index 88796de6e..8ab4a3a35 100644 --- a/packages/server/modules/fileuploads/index.js +++ b/packages/server/modules/fileuploads/index.js @@ -5,7 +5,7 @@ const { saveUploadFile } = require('./services/fileuploads') const request = require('request') const { streamWritePermissions } = require('@/modules/shared/authz') const { authMiddlewareCreator } = require('@/modules/shared/middleware') -const { moduleLogger, logger } = require('@/logging/logging') +const { moduleLogger } = require('@/logging/logging') const saveFileUploads = async ({ userId, streamId, branchName, uploadResults }) => { await Promise.all( @@ -35,8 +35,7 @@ exports.init = async (app) => { '/api/file/:fileType/:streamId/:branchName?', authMiddlewareCreator(streamWritePermissions), async (req, res) => { - const boundLogger = logger.child({ - endpoint: '/api/file/:fileType/:streamId/:branchName?', + req.log = req.log.child({ streamId: req.params.streamId, userId: req.context.userId, branchName: req.params.branchName ?? 'main' @@ -46,7 +45,7 @@ exports.init = async (app) => { `${process.env.CANONICAL_URL}/api/stream/${req.params.streamId}/blob`, async (err, response, body) => { if (err) { - boundLogger.error(err) + res.log.error(err, 'Error while uploading blob.') res.status(500).send(err.message) return } diff --git a/packages/server/modules/pwdreset/rest/index.ts b/packages/server/modules/pwdreset/rest/index.ts index 0883322fc..cb75fa0b6 100644 --- a/packages/server/modules/pwdreset/rest/index.ts +++ b/packages/server/modules/pwdreset/rest/index.ts @@ -1,4 +1,3 @@ -import { logger } from '@/logging/logging' import { finalizePasswordReset } from '@/modules/pwdreset/services/finalize' import { requestPasswordRecovery } from '@/modules/pwdreset/services/request' import { ensureError } from '@/modules/shared/helpers/errorHelper' @@ -7,28 +6,26 @@ import { Express } from 'express' export default function (app: Express) { // sends a password recovery email. 
app.post('/auth/pwdreset/request', async (req, res) => { - const boundLogger = logger.child({ endpoint: 'auth/pwdreset/request' }) try { const email = req.body.email await requestPasswordRecovery(email) return res.status(200).send('Password reset email sent.') } catch (e: unknown) { - boundLogger.info({ err: e }, 'Error while requesting password recovery.') + req.log.info({ err: e }, 'Error while requesting password recovery.') res.status(400).send(ensureError(e).message) } }) // Finalizes password recovery. app.post('/auth/pwdreset/finalize', async (req, res) => { - const boundLogger = logger.child({ endpoint: 'auth/pwdreset/finalize' }) try { if (!req.body.tokenId || !req.body.password) throw new Error('Invalid request.') await finalizePasswordReset(req.body.tokenId, req.body.password) return res.status(200).send('Password reset. Please log in.') } catch (e: unknown) { - boundLogger.info({ err: e }, 'Error while finalizing password recovery.') + req.log.info({ err: e }, 'Error while finalizing password recovery.') res.status(400).send(ensureError(e).message) } })
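
Taken together, patches 3 and 6 move the server from ad-hoc `boundLogger` children toward the request-scoped logger that pino-http attaches as `req.log`, carrying a request id and accumulating bindings such as `streamId` and `userId` as a handler learns them. The following is a minimal standalone sketch of that pattern, not code from the patches themselves: it assumes Express 4 with `pino` and `pino-http` installed, and the route and binding names (`/api/example/:streamId`) are purely illustrative.

import express from 'express'
import { randomUUID } from 'crypto'
import pino from 'pino'
import pinoHttp from 'pino-http'

const logger = pino()
const app = express()

app.use(
  pinoHttp({
    logger,
    // Reuse an incoming x-request-id when present, otherwise mint one,
    // so this server's logs can be correlated with upstream callers.
    genReqId: (req) => {
      const header = req.headers['x-request-id']
      return typeof header === 'string' && header.length > 0 ? header : randomUUID()
    },
    // Client-caused failures (4xx) are logged at info; 5xx and thrown errors at error.
    customLogLevel: (req, res, err) => (err || res.statusCode >= 500 ? 'error' : 'info')
  })
)

app.post('/api/example/:streamId', (req, res) => {
  // Bind request-scoped context once; every later req.log call carries it.
  req.log = req.log.child({ streamId: req.params.streamId })
  try {
    // ... do the actual work here ...
    req.log.info('Handled example request.')
    res.status(200).send('ok')
  } catch (err) {
    // A bad request is the caller's problem, so log it at info rather than error.
    req.log.info({ err }, 'Error while handling example request.')
    res.status(400).send('Bad request.')
  }
})

app.listen(3000)

Because the middleware owns the request/response log line and its bindings, per-module loggers such as `authLogger` are no longer needed inside request handlers, which is why patch 6 removes those imports.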
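
Patch 5 leans on the three.js layer-mask mechanism to scope raycasting and rendering per geometry type. The snippet below is a standalone sketch of that mechanism only, not Speckle viewer code: it assumes plain three.js objects, uses illustrative layer constants mirroring STREAM_CONTENT_MESH / _LINE / _POINT, and shows how a raycaster's mask is narrowed for a single query and then restored, the way the patched `intersectInternal` does.

import {
  BoxGeometry,
  Mesh,
  MeshBasicMaterial,
  PerspectiveCamera,
  Raycaster,
  Vector2
} from 'three'

// One layer per geometry type; each renderable is tagged with exactly one.
const MESH_LAYER = 10
const LINE_LAYER = 11
const POINT_LAYER = 12

const mesh = new Mesh(new BoxGeometry(), new MeshBasicMaterial())
mesh.layers.set(MESH_LAYER)

const camera = new PerspectiveCamera()
camera.position.z = 5
camera.updateMatrixWorld()

// Default cast mask: meshes and lines enabled, points left off.
const raycaster = new Raycaster()
raycaster.layers.disableAll()
raycaster.layers.enable(MESH_LAYER)
raycaster.layers.enable(LINE_LAYER)

function intersectWithLayers(objects: Mesh[], castLayers?: number[]) {
  const preserveMask = raycaster.layers.mask
  if (castLayers !== undefined) {
    // Narrow the mask to exactly the requested layers for this query.
    raycaster.layers.disableAll()
    castLayers.forEach((layer) => raycaster.layers.enable(layer))
  }
  raycaster.setFromCamera(new Vector2(0, 0), camera)
  const results = raycaster.intersectObjects(objects, true)
  // Restore the previous mask so later queries keep the default behaviour.
  raycaster.layers.mask = preserveMask
  return results
}

// Only mesh-layer objects are considered here, regardless of the default mask.
const hits = intersectWithLayers([mesh], [MESH_LAYER])

Because objects that should never be picked (for example point clouds) simply never have their layer enabled on the raycaster, the earlier instanceof-Points filtering in Intersections.ts becomes unnecessary, which is what the patch deletes.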