* Revert "Revert structured logging 2 (#1240)"
This reverts commit 78ecaeffcb.
* Logging should not be bundled into core shared directory
* making sure observability stuff isn't bundled into frontend


Co-authored-by: Kristaps Fabians Geikins <fabis94@live.com>
This commit is contained in:
Iain Sproat 2022-12-06 11:51:18 +00:00 коммит произвёл GitHub
Родитель 78ecaeffcb
Коммит 4d01e13a84
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 4AEE18F83AFDEB23
110 изменённых файлов: 814 добавлений и 413 удалений

Просмотреть файл

@ -25,7 +25,7 @@ services:
SESSION_SECRET: 'TODO:Replace'
STRATEGY_LOCAL: 'true'
DEBUG: 'speckle:*'
LOG_LEVEL: 'info'
POSTGRES_URL: 'postgres'
POSTGRES_USER: 'speckle'
@ -51,7 +51,7 @@ services:
mem_limit: '3000m'
memswap_limit: '3000m'
environment:
DEBUG: 'preview-service:*'
LOG_LEVEL: 'info'
PG_CONNECTION_STRING: 'postgres://speckle:speckle@postgres/speckle'
webhook-service:
@ -61,7 +61,7 @@ services:
image: speckle/speckle-webhook-service:local
restart: always
environment:
DEBUG: 'webhook-service:*'
LOG_LEVEL: 'info'
PG_CONNECTION_STRING: 'postgres://speckle:speckle@postgres/speckle'
fileimport-service:
@ -71,6 +71,6 @@ services:
image: speckle/speckle-fileimport-service:local
restart: always
environment:
DEBUG: 'fileimport-service:*'
LOG_LEVEL: 'info'
PG_CONNECTION_STRING: 'postgres://speckle:speckle@postgres/speckle'
SPECKLE_SERVER_URL: 'http://speckle-server:3000'

Просмотреть файл

@ -39,6 +39,7 @@
"eslint-config-prettier": "^8.5.0",
"husky": "^7.0.4",
"lint-staged": "^12.3.7",
"pino-pretty": "^9.1.1",
"prettier": "^2.5.1",
"ts-node": "^10.9.1",
"tsconfig-paths": "^4.0.0"

Просмотреть файл

@ -1,3 +1,24 @@
FROM node:16.18.0-bullseye-slim as build-stage
ARG NODE_ENV=production
ENV NODE_ENV=${NODE_ENV}
WORKDIR /speckle-server
COPY .yarnrc.yml .
COPY .yarn ./.yarn
COPY package.json yarn.lock ./
COPY packages/fileimport-service/package.json ./packages/fileimport-service/
COPY packages/shared/package.json ./packages/shared/
RUN yarn workspaces focus -A && yarn
COPY packages/shared ./packages/shared/
COPY packages/fileimport-service ./packages/fileimport-service/
RUN yarn workspaces foreach run build
FROM node:16.18.0-bullseye-slim as node
RUN apt-get update && \
@ -9,6 +30,9 @@ RUN apt-get update && \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
COPY packages/fileimport-service/requirements.txt /speckle-server/
RUN pip install --no-cache-dir -r /speckle-server/requirements.txt
ADD https://github.com/ufoscout/docker-compose-wait/releases/download/2.8.0/wait /wait
RUN chmod +x /wait
@ -21,13 +45,13 @@ COPY .yarnrc.yml .
COPY .yarn ./.yarn
COPY package.json yarn.lock ./
COPY packages/fileimport-service/package.json ./packages/fileimport-service/
COPY --from=build-stage /speckle-server/packages/shared ./packages/shared
COPY --from=build-stage /speckle-server/packages/fileimport-service ./packages/fileimport-service
WORKDIR /speckle-server/packages/fileimport-service
COPY packages/fileimport-service/package.json ./
RUN yarn workspaces focus --production
COPY packages/fileimport-service/requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY packages/fileimport-service .
ENTRYPOINT [ "tini", "--" ]
CMD ["yarn", "node", "src/daemon.js"]

Просмотреть файл

@ -1,4 +1,5 @@
const fs = require('fs')
const { logger } = require('../observability/logging')
const TMP_RESULTS_PATH = '/tmp/import_result.json'
@ -10,7 +11,7 @@ async function main() {
const [filePath, userId, streamId, branchName, commitMessage, fileId] = cmdArgs
// eslint-disable-next-line no-console
console.log('ARGV: ', filePath, userId, streamId, branchName, commitMessage)
logger.info('ARGV: ', filePath, userId, streamId, branchName, commitMessage)
const data = fs.readFileSync(filePath)

Просмотреть файл

@ -2,6 +2,7 @@ const { performance } = require('perf_hooks')
const { fetch } = require('undici')
const Parser = require('./parser_v2')
const ServerAPI = require('./api.js')
const { logger } = require('../observability/logging')
async function parseAndCreateCommit({
data,
@ -61,8 +62,7 @@ async function parseAndCreateCommit({
})
const json = await response.json()
// eslint-disable-next-line no-console
console.log(json)
logger.info(json)
return json.data.commitCreate
}

Просмотреть файл

@ -1,6 +1,7 @@
/* eslint-disable no-console */
/* eslint-disable camelcase */
const WebIFC = require('web-ifc/web-ifc-api-node')
const { logger } = require('../observability/logging.js')
const ServerAPI = require('./api.js')
module.exports = class IFCParser {
@ -29,7 +30,7 @@ module.exports = class IFCParser {
// as reference objects in this.productGeo
this.productGeo = {}
await this.createGeometries()
console.log(`Geometries created: ${Object.keys(this.productGeo).length} meshes.`)
logger.info(`Geometries created: ${Object.keys(this.productGeo).length} meshes.`)
// Lastly, traverse the ifc project object and parse it into something friendly; as well as
// replace all its geometries with actual references to speckle meshes from the productGeo map
@ -133,7 +134,7 @@ module.exports = class IFCParser {
if (this.cache[element.expressID.toString()])
return this.cache[element.expressID.toString()]
// If you got here -> It's an IFC Element: create base object, upload and return ref.
// console.log( `Traversing element ${element.expressID}; Recurse: ${recursive}; Stack ${depth}` )
// logger.debug( `Traversing element ${element.expressID}; Recurse: ${recursive}; Stack ${depth}` )
// Traverse all key/value pairs first.
for (const key of Object.keys(element)) {
@ -252,7 +253,7 @@ module.exports = class IFCParser {
element.propertySets ||
element.typeProps
) {
console.log(
logger.info(
`${element.constructor.name} ${element.GlobalId}:\n\tchildren count: ${
element.children ? element.children.length : '0'
};\n\tspatial children count: ${

Просмотреть файл

@ -0,0 +1,11 @@
const { Observability } = require('@speckle/shared')

const { getLogger, extendLoggerComponent } = Observability

// Root logger for the fileimport service, tagged with its component name.
// Verbosity is driven by the LOG_LEVEL env var (defaults to 'info').
const baseLogger = getLogger(process.env.LOG_LEVEL || 'info')
const logger = extendLoggerComponent(baseLogger, 'fileimport-service')

module.exports = { logger }

Просмотреть файл

@ -23,6 +23,7 @@
"url": "https://github.com/specklesystems/speckle-server/issues"
},
"dependencies": {
"@speckle/shared": "workspace:^",
"aws-sdk": "^2.1075.0",
"bcrypt": "^5.0.1",
"crypto-random-string": "^3.3.1",

Просмотреть файл

@ -16,6 +16,7 @@ const { spawn } = require('child_process')
const ServerAPI = require('../ifc/api')
const objDependencies = require('./objDependencies')
const { logger } = require('../observability/logging')
const HEALTHCHECK_FILE_PATH = '/tmp/last_successful_query'
@ -56,7 +57,7 @@ async function doTask(task) {
const metricDurationEnd = metricDuration.startTimer()
try {
console.log('Doing task ', task)
logger.info('Doing task ', task)
const info = await FileUploads().where({ id: task.id }).first()
if (!info) {
throw new Error('Internal error: DB inconsistent')
@ -162,7 +163,7 @@ async function doTask(task) {
[commitId, task.id]
)
} catch (err) {
console.log('Error: ', err)
logger.error(err)
await knex.raw(
`
UPDATE file_uploads
@ -189,21 +190,21 @@ async function doTask(task) {
function runProcessWithTimeout(cmd, cmdArgs, extraEnv, timeoutMs) {
return new Promise((resolve, reject) => {
console.log(`Starting process: ${cmd} ${cmdArgs}`)
logger.info(`Starting process: ${cmd} ${cmdArgs}`)
const childProc = spawn(cmd, cmdArgs, { env: { ...process.env, ...extraEnv } })
childProc.stdout.on('data', (data) => {
console.log('Parser: ', data.toString())
logger.info('Parser: ', data.toString())
})
childProc.stderr.on('data', (data) => {
console.error('Parser: ', data.toString())
logger.error('Parser: ', data.toString())
})
let timedOut = false
const timeout = setTimeout(() => {
console.log('Process timeout. Killing process...')
logger.error('Process timeout. Killing process...')
timedOut = true
childProc.kill(9)
@ -211,7 +212,7 @@ function runProcessWithTimeout(cmd, cmdArgs, extraEnv, timeoutMs) {
}, timeoutMs)
childProc.on('close', (code) => {
console.log(`Process exited with code ${code}`)
logger.info(`Process exited with code ${code}`)
if (timedOut) return // ignore `close` calls after killing (the promise was already rejected)
@ -247,18 +248,18 @@ async function tick() {
setTimeout(tick, 10)
} catch (err) {
metricOperationErrors.labels('main_loop').inc()
console.log('Error executing task: ', err)
logger.error('Error executing task: ', err)
setTimeout(tick, 5000)
}
}
async function main() {
console.log('Starting FileUploads Service...')
logger.info('Starting FileUploads Service...')
initPrometheusMetrics()
process.on('SIGTERM', () => {
shouldExit = true
console.log('Shutting down...')
logger.info('Shutting down...')
})
tick()

Просмотреть файл

@ -6,6 +6,7 @@ const path = require('path')
const { downloadFile, getFileInfoByName } = require('./filesApi')
const isValidFilename = require('valid-filename')
const { logger } = require('../observability/logging')
const getReferencedMtlFiles = async ({ objFilePath }) => {
const mtlFiles = []
@ -25,7 +26,7 @@ const getReferencedMtlFiles = async ({ objFilePath }) => {
await events.once(rl, 'close')
} catch (err) {
console.error(`Error getting dependencies for file ${objFilePath}: ${err}`)
logger.error(`Error getting dependencies for file ${objFilePath}: ${err}`)
}
return mtlFiles
}
@ -34,17 +35,17 @@ module.exports = {
async downloadDependencies({ objFilePath, streamId, destinationDir, token }) {
const dependencies = await getReferencedMtlFiles({ objFilePath })
console.log(`Obj file depends on ${dependencies}`)
logger.info(`Obj file depends on ${dependencies}`)
for (const mtlFile of dependencies) {
// there might be multiple files named with the same name, take the first...
const [file] = (await getFileInfoByName({ fileName: mtlFile, streamId, token }))
.blobs
if (!file) {
console.log(`OBJ dependency file not found in stream: ${mtlFile}`)
logger.info(`OBJ dependency file not found in stream: ${mtlFile}`)
continue
}
if (!isValidFilename(mtlFile)) {
console.log(`Invalid filename reference in OBJ dependencies: ${mtlFile}`)
logger.warn(`Invalid filename reference in OBJ dependencies: ${mtlFile}`)
continue
}
await downloadFile({

Просмотреть файл

@ -5,17 +5,15 @@ build:
run:
docker run -it --rm --net=host \
-e DEBUG="preview-service:*" \
-e PG_CONNECTION_STRING="postgres://speckle:speckle@localhost/speckle" \
preview-service
run-release:
docker run -it --rm --net=host \
-e DEBUG="preview-service:*" \
-e PG_CONNECTION_STRING="postgres://speckle:speckle@localhost/speckle" \
speckle/speckle-preview-service:v2.3.3
small:
docker build -t small-preview-service ../.. -f Dockerfile.small
docker run -it --rm --net=host -e DEBUG="preview-service:*" -e PG_CONNECTION_STRING="postgres://speckle:speckle@localhost/speckle" small-preview-service bash
docker run -it --rm --net=host -e PG_CONNECTION_STRING="postgres://speckle:speckle@localhost/speckle" small-preview-service bash

Просмотреть файл

@ -4,16 +4,16 @@ const createError = require('http-errors')
const express = require('express')
const path = require('path')
const cookieParser = require('cookie-parser')
const logger = require('morgan')
const indexRouter = require('./routes/index')
const previewRouter = require('./routes/preview')
const objectsRouter = require('./routes/objects')
const apiRouter = require('./routes/api')
const { LoggingExpressMiddleware } = require('./observability/expressLogging')
const app = express()
app.use(logger('dev'))
app.use(LoggingExpressMiddleware)
app.use(express.json({ limit: '100mb' }))
app.use(express.urlencoded({ limit: '100mb', extended: false }))

Просмотреть файл

@ -6,6 +6,7 @@ const fetch = require('node-fetch')
const fs = require('fs')
const metrics = require('./prometheusMetrics')
const joinImages = require('join-images')
const { logger } = require('../observability/logging')
let shouldExit = false
@ -130,22 +131,22 @@ async function tick() {
setTimeout(tick, 10)
} catch (err) {
metrics.metricOperationErrors.labels('main_loop').inc()
console.log('Error executing task: ', err)
logger.error('Error executing task: ', err)
setTimeout(tick, 5000)
}
}
async function startPreviewService() {
console.log('📸 Started Preview Service')
logger.info('📸 Started Preview Service')
process.on('SIGTERM', () => {
shouldExit = true
console.log('Shutting down...')
logger.info('Shutting down...')
})
process.on('SIGINT', () => {
shouldExit = true
console.log('Shutting down...')
logger.info('Shutting down...')
})
metrics.initPrometheusMetrics()

Просмотреть файл

@ -5,9 +5,9 @@
*/
const app = require('../app')
const debug = require('debug')('preview-service:server')
const http = require('http')
const { startPreviewService } = require('../bg_service')
const { serverLogger, logger } = require('../observability/logging')
/**
* Get port from environment and store in Express.
@ -64,11 +64,11 @@ function onError(error) {
// handle specific listen errors with friendly messages
switch (error.code) {
case 'EACCES':
console.error(bind + ' requires elevated privileges')
logger.error(bind + ' requires elevated privileges')
process.exit(1)
break
case 'EADDRINUSE':
console.error(bind + ' is already in use')
logger.error(bind + ' is already in use')
process.exit(1)
break
default:
@ -83,7 +83,7 @@ function onError(error) {
function onListening() {
const addr = server.address()
const bind = typeof addr === 'string' ? 'pipe ' + addr : 'port ' + addr.port
debug('Listening on ' + bind)
serverLogger.info('Listening on ' + bind)
startPreviewService()
}

Просмотреть файл

@ -0,0 +1,7 @@
const HttpLogger = require('pino-http')
const { logger } = require('./logging')

// Express middleware that attaches the shared pino logger to each request
// (available as `req.log`). Automatic per-request completion logging is
// disabled; handlers log explicitly instead.
const LoggingExpressMiddleware = HttpLogger({ logger, autoLogging: false })

module.exports = { LoggingExpressMiddleware }

Просмотреть файл

@ -0,0 +1,13 @@
const { Observability } = require('@speckle/shared')

const { getLogger, extendLoggerComponent } = Observability

// Root logger for the preview service; LOG_LEVEL env var controls verbosity
// (defaults to 'info').
const logger = extendLoggerComponent(
  getLogger(process.env.LOG_LEVEL || 'info'),
  'preview-service'
)

// Sub-logger scoped to the HTTP server component.
const serverLogger = extendLoggerComponent(logger, 'server')

module.exports = { logger, serverLogger }

Просмотреть файл

@ -14,25 +14,25 @@
"node": ">=14.0.0 <17.0.0"
},
"scripts": {
"dev": "DEBUG='preview-service:*' nodemon --trace-deprecation ./bin/www",
"dev": "nodemon --trace-deprecation ./bin/www",
"build": "webpack --env dev --config webpack.config.render_page.js && webpack --env build --config webpack.config.render_page.js",
"lint": "eslint . --ext .js,.ts"
},
"dependencies": {
"@speckle/objectloader": "workspace:^",
"@speckle/shared": "workspace:^",
"@speckle/viewer": "workspace:^",
"cookie-parser": "~1.4.4",
"crypto": "^1.0.1",
"debug": "~2.6.9",
"express": "~4.16.1",
"file-type": "^16.5.4",
"http-errors": "~1.6.3",
"join-images": "^1.1.3",
"knex": "^2.0.0",
"morgan": "~1.9.1",
"node-fetch": "^2.6.1",
"pg": "^8.7.3",
"pg-query-stream": "^4.2.3",
"pino-http": "^8.2.1",
"prom-client": "^14.0.1",
"puppeteer": "^13.3.2",
"sharp": "^0.30.5",

Просмотреть файл

@ -1,9 +1,10 @@
import { DebugViewer, DefaultViewerParams } from '@speckle/viewer'
// import { logger } from '../../observability/logging'
const v = new DebugViewer(document.getElementById('renderer'), DefaultViewerParams)
window.v = v
// v.on( ViewerEvent.LoadProgress, args => console.log( args ) )
// v.on( ViewerEvent.LoadProgress, args => logger.debug( args ) )
window.LoadData = async function LoadData(url) {
// token is not used in this context, since the preview service talks directly to the DB

Просмотреть файл

@ -5,6 +5,7 @@ const express = require('express')
const { getObjectsStream } = require('./services/objects_utils')
const { SpeckleObjectsStream } = require('./speckleObjectsStream')
const { pipeline, PassThrough } = require('stream')
const { logger } = require('../observability/logging')
const router = express.Router()
@ -34,11 +35,11 @@ router.post('/getobjects/:streamId', async (req, res) => {
res,
(err) => {
if (err) {
console.log(
logger.error(
`Error streaming objects from stream ${req.params.streamId}: ${err}`
)
} else {
console.log(
logger.error(
`Streamed ${childrenList.length} objects from stream ${
req.params.streamId
} (size: ${gzipStream.bytesWritten / 1000000} MB)`

Просмотреть файл

@ -5,6 +5,7 @@ const express = require('express')
const { getObject, getObjectChildrenStream } = require('./services/objects_utils')
const { SpeckleObjectsStream } = require('./speckleObjectsStream')
const { pipeline, PassThrough } = require('stream')
const { logger } = require('../observability/logging')
const router = express.Router()
@ -44,11 +45,11 @@ router.get('/:streamId/:objectId', async function (req, res) {
res,
(err) => {
if (err) {
console.log(
logger.error(
`Error downloading object ${req.params.objectId} from stream ${req.params.streamId}: ${err}`
)
} else {
console.log(
logger.info(
`Downloaded object ${req.params.objectId} from stream ${
req.params.streamId
} (size: ${gzipStream.bytesWritten / 1000000} MB)`
@ -68,7 +69,7 @@ router.get('/:streamId/:objectId/single', async (req, res) => {
return res.status(404).send('Failed to find object.')
}
console.log(
logger.info(
`Downloaded single object ${req.params.objectId} from stream ${req.params.streamId}`
)

Просмотреть файл

@ -4,6 +4,7 @@
const express = require('express')
const router = express.Router()
const puppeteer = require('puppeteer')
const { logger } = require('../observability/logging')
async function pageFunction(objectUrl) {
waitForAnimation = async (ms = 70) =>
@ -66,7 +67,7 @@ async function getScreenshot(objectUrl) {
const wrapperPromise = (async () => {
await page.goto('http://127.0.0.1:3001/render/')
console.log('Page loaded')
logger.info('Page loaded')
// Handle page crash (oom?)
page.on('error', (err) => {
@ -79,7 +80,7 @@ async function getScreenshot(objectUrl) {
try {
ret = await wrapperPromise
} catch (err) {
console.log(`Error generating preview for ${objectUrl}: ${err}`)
logger.error(`Error generating preview for ${objectUrl}: ${err}`)
ret = {
error: err
}
@ -92,7 +93,7 @@ async function getScreenshot(objectUrl) {
return null
}
console.log(
logger.info(
`Generated preview for ${objectUrl} in ${ret.duration} sec with ${
ret.mem.total / 1000000
} MB of memory`
@ -147,7 +148,7 @@ router.get('/:streamId/:objectId', async function (req, res) {
}
*/
console.log(objectUrl)
logger.info(objectUrl)
const scr = await getScreenshot(objectUrl)

4
packages/server/.vscode/launch.json поставляемый
Просмотреть файл

@ -62,10 +62,6 @@
"--timeout=0"
],
"console": "integratedTerminal",
"env": {
// "POSTGRES_URL": "postgres://speckle:speckle@localhost/speckle2_test",
"DEBUG": "server:*"
},
// "envFile": "${workspaceFolder}/.env",
"runtimeExecutable": "npm",
"skipFiles": ["<node_internals>/**"],

Просмотреть файл

@ -6,12 +6,12 @@ import express, { Express } from 'express'
// `express-async-errors` patches express to catch errors in async handlers. no variable needed
import 'express-async-errors'
import compression from 'compression'
import logger from 'morgan-debug'
import debug from 'debug'
import { createTerminus } from '@godaddy/terminus'
import * as Sentry from '@sentry/node'
import Logging from '@/logging'
import { startupLogger, shutdownLogger } from '@/logging/logging'
import { LoggingExpressMiddleware } from '@/logging/expressLogging'
import { errorLoggingMiddleware } from '@/logging/errorLogging'
import prometheusClient from 'prom-client'
@ -184,7 +184,7 @@ export async function init() {
await knex.migrate.latest()
if (process.env.NODE_ENV !== 'test') {
app.use(logger('speckle', 'dev', {}))
app.use(LoggingExpressMiddleware)
}
if (process.env.COMPRESSION) {
@ -273,8 +273,8 @@ export async function startHttp(
// app.use('/', frontendProxy)
app.use(await createFrontendProxy())
debug('speckle:startup')('✨ Proxying frontend (dev mode):')
debug('speckle:startup')(`👉 main application: http://localhost:${port}/`)
startupLogger.info('✨ Proxying frontend (dev mode):')
startupLogger.info(`👉 main application: http://localhost:${port}/`)
}
// Production mode
@ -291,13 +291,13 @@ export async function startHttp(
signals: ['SIGTERM', 'SIGINT'],
timeout: 5 * 60 * 1000,
beforeShutdown: async () => {
debug('speckle:shutdown')('Shutting down (signal received)...')
shutdownLogger.info('Shutting down (signal received)...')
},
onSignal: async () => {
await shutdown()
},
onShutdown: () => {
debug('speckle:shutdown')('Shutdown completed')
shutdownLogger.info('Shutdown completed')
process.exit(0)
}
})
@ -307,7 +307,7 @@ export async function startHttp(
const addressString = isString(address) ? address : address?.address
const port = isString(address) ? null : address?.port
debug('speckle:startup')(
startupLogger.info(
`🚀 My name is Speckle Server, and I'm running at ${addressString}:${port}`
)
app.emit('appStarted')

Просмотреть файл

@ -7,11 +7,11 @@
*/
require('ts-node/register')
const debug = require('debug')('speckle:www')
const { logger } = require('../logging/logging')
const { init, startHttp } = require('../app')
init()
.then(({ app, server }) => startHttp(server, app))
.catch((err) => debug(err))
.catch((err) => logger.error(err))
// 💥

Просмотреть файл

@ -1,11 +1,11 @@
#!/usr/bin/env node
'use strict'
const debug = require('debug')('speckle:www')
const { logger } = require('../dist/logging/logging')
const { init, startHttp } = require('../dist/app')
init()
.then(({ app, server }) => startHttp(server, app))
.catch((err) => debug(err))
.catch((err) => logger.error(err))
// 💥

3
packages/server/bootstrap.js поставляемый
Просмотреть файл

@ -24,6 +24,7 @@ const {
getApolloServerVersion,
getServerVersion
} = require('./modules/shared/helpers/envHelper')
const { logger } = require('@/logging/logging')
if (isApolloMonitoringEnabled() && !getApolloServerVersion()) {
process.env.APOLLO_SERVER_USER_VERSION = getServerVersion()
@ -37,7 +38,7 @@ if (isTestEnv()) {
const e = new Error(
'Attempting to run tests without an .env.test file properly set up! Check readme!'
)
console.error(e)
logger.error(e)
process.exit(1)
}
}

Просмотреть файл

@ -3,6 +3,7 @@
const env = process.env.NODE_ENV || 'development'
const configs = require('@/knexfile.js')
const { dbStartupLogger } = require('@/logging/logging')
const config = configs[env]
config.log = {
@ -15,9 +16,7 @@ config.log = {
}
}
const debug = require('debug')
debug('speckle:db-startup')(`Loaded knex conf for ${env}`)
dbStartupLogger.info(`Loaded knex conf for ${env}`)
/**
* Need to override type because type def file incorrectly uses ES6

Просмотреть файл

@ -1,4 +1,5 @@
/* istanbul ignore file */
// const { logger } = require('@/logging/logging')
const Sentry = require('@sentry/node')
const { ApolloError } = require('apollo-server-express')
const prometheusClient = require('prom-client')
@ -27,7 +28,7 @@ module.exports = {
try {
const actionName = `${ctx.operation.operation} ${ctx.operation.selectionSet.selections[0].name.value}`
metricCallCount.labels(actionName).inc()
// console.log( actionName )
// logger.debug(actionName)
} catch (e) {
Sentry.captureException(e)
}

Просмотреть файл

@ -1,4 +1,5 @@
/* istanbul ignore file */
const { logger } = require('@/logging/logging')
const prometheusClient = require('prom-client')
let metricErrorCount = null
@ -13,7 +14,9 @@ module.exports = {
})
}
console.log(`Error when handling ${req.originalUrl} from ${req.ip}: ${err.message}`)
logger.error(
`Error when handling ${req.originalUrl} from ${req.ip}: ${err.message}`
)
let route = 'unknown'
if (req.route && req.route.path) route = req.route.path
metricErrorCount.labels(route).inc()

Просмотреть файл

@ -0,0 +1,7 @@
import HttpLogger from 'pino-http'

import { logger } from './logging'

// Express middleware that wires the shared pino logger onto each request
// (exposed as `req.log`). Automatic request-completion logs are turned off;
// routes and error middleware log explicitly.
const httpLoggerOptions = { logger, autoLogging: false }

export const LoggingExpressMiddleware = HttpLogger(httpLoggerOptions)

Просмотреть файл

@ -0,0 +1,21 @@
// NOTE: this module is loaded by www & ts-www before init() runs, so it must
// not use '@'-style local path aliases — they aren't registered yet.
import { Observability } from '@speckle/shared'
export { Observability } from '@speckle/shared'

// Root logger; verbosity is driven by the LOG_LEVEL env var (default 'info').
export const logger = Observability.getLogger(process.env.LOG_LEVEL || 'info')

// Phase-scoped child loggers for distinct stages of the process lifecycle.
export const startupLogger = logger.child({ phase: 'startup' })
export const dbStartupLogger = logger.child({ phase: 'db-startup' })
export const shutdownLogger = logger.child({ phase: 'shutdown' })

// Component-scoped loggers used during normal operation.
const asComponent = Observability.extendLoggerComponent

export const moduleLogger = asComponent(logger, 'modules')
export const activitiesLogger = asComponent(moduleLogger, 'activities')
export const cliLogger = asComponent(logger, 'cli')
export const notificationsLogger = asComponent(logger, 'notifications')
export const uploadEndpointLogger = asComponent(logger, 'upload-endpoint')
export const dbLogger = asComponent(logger, 'db')
export const servicesLogger = asComponent(logger, 'services')

Просмотреть файл

@ -1,12 +1,12 @@
import { moduleLogger } from '@/logging/logging'
import { initializeEventListener } from '@/modules/accessrequests/services/eventListener'
import { Optional, SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import { modulesDebug } from '@/modules/shared/utils/logger'
let quitListeners: Optional<() => void> = undefined
const ServerAccessRequestsModule: SpeckleModule = {
init(_, isInitial) {
modulesDebug('🔐 Init access request module')
moduleLogger.info('🔐 Init access request module')
if (isInitial) {
quitListeners = initializeEventListener()

Просмотреть файл

@ -2,11 +2,9 @@ import { SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import cron from 'node-cron'
import { sendActivityNotifications } from '@/modules/activitystream/services/summary'
import { initializeEventListener } from '@/modules/activitystream/services/eventListener'
import { modulesDebug } from '@/modules/shared/utils/logger'
import { publishNotification } from '@/modules/notifications/services/publication'
import { scheduleExecution } from '@/modules/core/services/taskScheduler'
const activitiesDebug = modulesDebug.extend('activities')
import { activitiesLogger, moduleLogger } from '@/logging/logging'
let scheduledTask: cron.ScheduledTask | null = null
@ -25,7 +23,7 @@ const scheduleWeeklyActivityNotifications = () => {
'weeklyActivityNotification',
//task should be locked for 10 minutes
async (now: Date) => {
activitiesDebug('Sending weekly activity digests notifications.')
activitiesLogger.info('Sending weekly activity digests notifications.')
const end = now
const start = new Date(end.getTime())
start.setDate(start.getDate() - numberOfDays)
@ -37,7 +35,7 @@ const scheduleWeeklyActivityNotifications = () => {
const activityModule: SpeckleModule = {
init: async (_, isInitial) => {
modulesDebug('🤺 Init activity module')
moduleLogger.info('🤺 Init activity module')
if (isInitial) {
initializeEventListener()
scheduledTask = scheduleWeeklyActivityNotifications()

Просмотреть файл

@ -1,9 +1,9 @@
/* istanbul ignore file */
'use strict'
const debug = require('debug')
const { moduleLogger } = require('@/logging/logging')
exports.init = (app) => {
debug('speckle:modules')('💅 Init graphql api explorer module')
moduleLogger.info('💅 Init graphql api explorer module')
// sweet and simple
app.get('/explorer', (req, res) => {

Просмотреть файл

@ -1,5 +1,4 @@
'use strict'
const debug = require('debug')
const knex = require('@/db/knex')
const Scopes = () => knex('scopes')
const Apps = () => knex('server_apps')
@ -8,6 +7,7 @@ const AppScopes = () => knex('server_apps_scopes')
const { getApp } = require('@/modules/auth/services/apps')
const { Scopes: ScopesConst } = require('@/modules/core/helpers/mainConstants')
const { difference } = require('lodash')
const { moduleLogger, logger } = require('@/logging/logging')
let allScopes = []
@ -25,7 +25,7 @@ module.exports = async () => {
async function registerOrUpdateApp(app) {
if (app.scopes && app.scopes === 'all') {
// let scopes = await Scopes( ).select( '*' )
// console.log( allScopes.length )
// logger.debug( allScopes.length )
app.scopes = allScopes.map((s) => s.name)
}
@ -44,7 +44,7 @@ async function registerDefaultApp(app) {
await Apps().insert(app)
await AppScopes().insert(scopes)
} catch (e) {
console.log(e)
logger.error(e)
}
}
@ -57,7 +57,7 @@ async function updateDefaultApp(app, existingApp) {
let affectedTokenIds = []
if (newScopes.length || removedScopes.length) {
debug('speckle:modules')(`🔑 Updating default app ${app.name}`)
moduleLogger.info(`🔑 Updating default app ${app.name}`)
affectedTokenIds = await knex('user_server_app_tokens')
.where({ appId: app.id })
.pluck('tokenId')

Просмотреть файл

@ -1,9 +1,9 @@
'use strict'
const debug = require('debug')
const { registerOrUpdateScope } = require('@/modules/shared')
const { moduleLogger } = require('@/logging/logging')
exports.init = async (app) => {
debug('speckle:modules')('🔑 Init auth module')
moduleLogger.info('🔑 Init auth module')
// Initialize authn strategies
exports.authStrategies = await require('./strategies')(app)

Просмотреть файл

@ -1,5 +1,4 @@
'use strict'
const debug = require('debug')
const cors = require('cors')
const sentry = require(`@/logging/sentryHelper`)
@ -15,6 +14,7 @@ const { revokeRefreshToken } = require(`@/modules/auth/services/apps`)
const { validateScopes } = require(`@/modules/shared`)
const { InvalidAccessCodeRequestError } = require('@/modules/auth/errors')
const { ForbiddenError } = require('apollo-server-errors')
const { moduleLogger } = require('@/logging/logging')
// TODO: Secure these endpoints!
module.exports = (app) => {
@ -45,7 +45,7 @@ module.exports = (app) => {
return res.redirect(`${app.redirectUrl}?access_code=${ac}`)
} catch (err) {
sentry({ err })
debug('speckle:error')(err)
moduleLogger.error(err)
if (
err instanceof InvalidAccessCodeRequestError ||
@ -97,6 +97,7 @@ module.exports = (app) => {
return res.send(authResponse)
} catch (err) {
sentry({ err })
moduleLogger.error(err)
return res.status(401).send({ err: err.message })
}
})
@ -117,7 +118,8 @@ module.exports = (app) => {
return res.status(200).send({ message: 'You have logged out.' })
} catch (err) {
sentry({ err })
return res.status(400).send({ err: err.message })
moduleLogger.error(err)
return res.status(400).send('Something went wrong while trying to logout.')
}
})
}

Просмотреть файл

@ -4,6 +4,7 @@ const crs = require('crypto-random-string')
const knex = require(`@/db/knex`)
const { createToken, createBareToken } = require(`@/modules/core/services/tokens`)
const { logger } = require('@/logging/logging')
const Users = () => knex('users')
const ApiTokens = () => knex('api_tokens')
const ServerApps = () => knex('server_apps')
@ -129,7 +130,7 @@ module.exports = {
await module.exports.revokeExistingAppCredentials({ appId: app.id })
if (app.scopes) {
// console.log( app.scopes, app.id )
logger.debug(app.scopes, app.id)
// Flush existing app scopes
await ServerAppsScopes().where({ appId: app.id }).del()
// Update new scopes

Просмотреть файл

@ -1,5 +1,5 @@
const passport = require('passport')
const debug = require('debug')
const { logger } = require('@/logging/logging')
/**
* Wrapper for passport.authenticate that handles success & failure scenarios correctly
@ -11,7 +11,7 @@ const debug = require('debug')
function passportAuthenticate(strategy, options = undefined) {
return (req, res, next) =>
passport.authenticate(strategy, options, (err, user, info) => {
if (err) debug('speckle:error')(err)
if (err) logger.error(err)
if (!user) {
const errMsg = info?.message || 'Failed to authenticate, contact server admins'
return res.redirect(`/error?message=${errMsg}`)

Просмотреть файл

@ -8,6 +8,7 @@ const passport = require('passport')
const sentry = require('@/logging/sentryHelper')
const { createAuthorizationCode } = require('./services/apps')
const { isSSLServer } = require('@/modules/shared/helpers/envHelper')
const { moduleLogger } = require('@/logging/logging')
/**
* TODO: Get rid of session entirely, we don't use it for the app and it's not really necessary for the auth flow, so it only complicates things
@ -69,6 +70,7 @@ module.exports = async (app) => {
return res.redirect(redirectUrl)
} catch (err) {
sentry({ err })
moduleLogger.error(err)
if (req.session) req.session.destroy()
return res.status(401).send({ err: err.message })
}

Просмотреть файл

@ -4,7 +4,6 @@
const passport = require('passport')
const OIDCStrategy = require('passport-azure-ad').OIDCStrategy
const URL = require('url').URL
const debug = require('debug')
const { findOrCreateUser, getUserByEmail } = require('@/modules/core/services/users')
const { getServerInfo } = require('@/modules/core/services/generic')
const {
@ -13,6 +12,7 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { passportAuthenticate } = require('@/modules/auth/services/passportService')
const { logger } = require('@/logging/logging')
module.exports = async (app, session, sessionStorage, finalizeAuth) => {
const strategy = new OIDCStrategy(
@ -120,7 +120,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => {
// return to the auth flow
return next()
} catch (err) {
debug('speckle:error')(err)
logger.error(err)
return next()
}
},

Просмотреть файл

@ -4,7 +4,6 @@
const passport = require('passport')
const GithubStrategy = require('passport-github2')
const URL = require('url').URL
const debug = require('debug')
const { findOrCreateUser, getUserByEmail } = require('@/modules/core/services/users')
const { getServerInfo } = require('@/modules/core/services/generic')
const {
@ -13,6 +12,7 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { passportAuthenticate } = require('@/modules/auth/services/passportService')
const { logger } = require('@/logging/logging')
module.exports = async (app, session, sessionStorage, finalizeAuth) => {
const strategy = {
@ -91,7 +91,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => {
// return to the auth flow
return done(null, myUser)
} catch (err) {
debug('speckle:error')(err)
logger.error(err)
return done(null, false, { message: err.message })
}
}

Просмотреть файл

@ -2,7 +2,6 @@
'use strict'
const passport = require('passport')
const GoogleStrategy = require('passport-google-oauth20').Strategy
const debug = require('debug')
const { findOrCreateUser, getUserByEmail } = require('@/modules/core/services/users')
const { getServerInfo } = require('@/modules/core/services/generic')
const {
@ -11,6 +10,7 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { passportAuthenticate } = require('@/modules/auth/services/passportService')
const { logger } = require('@/logging/logging')
module.exports = async (app, session, sessionStorage, finalizeAuth) => {
const strategy = {
@ -88,7 +88,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => {
// return to the auth flow
return done(null, myUser)
} catch (err) {
debug('speckle:error')(err)
logger.error(err)
return done(null, false, { message: err.message })
}
}

Просмотреть файл

@ -1,5 +1,4 @@
'use strict'
const debug = require('debug')
const {
createUser,
validatePasssword,
@ -18,6 +17,7 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { getIpFromRequest } = require('@/modules/shared/utils/ip')
const { logger } = require('@/logging/logging')
module.exports = async (app, session, sessionAppId, finalizeAuth) => {
const strategy = {
@ -101,7 +101,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => {
return next()
} catch (err) {
debug('speckle:error')(err)
logger.error(err)
return res.status(400).send({ err: err.message })
}
},

Просмотреть файл

@ -1,4 +1,3 @@
const debug = require('debug')
const Busboy = require('busboy')
const {
streamReadPermissions,
@ -37,18 +36,19 @@ const {
ResourceMismatch,
BadRequestError
} = require('@/modules/shared/errors')
const { moduleLogger, logger } = require('@/logging/logging')
const ensureConditions = async () => {
if (process.env.DISABLE_FILE_UPLOADS) {
debug('speckle:modules')('📦 Blob storage is DISABLED')
moduleLogger.info('📦 Blob storage is DISABLED')
return
} else {
debug('speckle:modules')('📦 Init BlobStorage module')
moduleLogger.info('📦 Init BlobStorage module')
await ensureStorageAccess()
}
if (!process.env.S3_BUCKET) {
debug('speckle:error')(
logger.error(
'S3_BUCKET env variable was not specified. 📦 BlobStorage will be DISABLED.'
)
return
@ -104,7 +104,7 @@ exports.init = async (app) => {
if (formKey.includes('hash:')) {
clientHash = formKey.split(':')[1]
if (clientHash && clientHash !== '') {
// console.log(`I have a client hash (${clientHash})`)
// logger.debug(`I have a client hash (${clientHash})`)
blobId = clientHash
}
}
@ -148,7 +148,7 @@ exports.init = async (app) => {
})
busboy.on('error', async (err) => {
debug('speckle:error')(`File upload error: ${err}`)
logger.error(`File upload error: ${err}`)
//delete all started uploads
await Promise.all(
Object.keys(uploadOperations).map((blobId) =>

Просмотреть файл

@ -1,3 +1,4 @@
const { logger } = require('@/logging/logging')
const { NotFoundError } = require('@/modules/shared/errors')
const {
S3Client,
@ -88,7 +89,7 @@ const storeFileStream = async ({ objectKey, fileStream }) => {
})
// parallelUploads3.on('httpUploadProgress', (progress) => {
// console.log(progress)
// logger.debug(progress)
// })
const data = await parallelUploads3.done()
@ -114,7 +115,7 @@ const ensureStorageAccess = async () => {
try {
await client.send(new CreateBucketCommand({ Bucket }))
} catch (err) {
console.log(err)
logger.error(err)
}
} else {
throw new Error(`Can't open S3 bucket '${Bucket}': ${err.toString()}`)

Просмотреть файл

@ -2,7 +2,7 @@ import { CommandModule } from 'yargs'
import { initializeQueue } from '@/modules/notifications/services/queue'
import { sendActivityNotifications } from '@/modules/activitystream/services/summary'
import { publishNotification } from '@/modules/notifications/services/publication'
import { cliDebug } from '@/modules/shared/utils/logger'
import { cliLogger } from '@/logging/logging'
const command: CommandModule = {
command: 'send [days]',
@ -22,7 +22,7 @@ const command: CommandModule = {
const start = new Date(end.getTime())
start.setDate(start.getDate() - numberOfDays)
await sendActivityNotifications(start, end, publishNotification)
cliDebug('Sent activity notifications')
cliLogger.info('Sent activity notifications')
}
}

Просмотреть файл

@ -7,8 +7,8 @@ import {
NOTIFICATIONS_QUEUE,
buildNotificationsQueue
} from '@/modules/notifications/services/queue'
import { cliDebug } from '@/modules/shared/utils/logger'
import { noop } from 'lodash'
import { cliLogger } from '@/logging/logging'
const PORT = 3032
@ -25,15 +25,15 @@ const command: CommandModule<unknown, { testQueueId: string }> = {
handler: async (argv) => {
const testQueueId = argv.testQueueId
cliDebug('Initializing bull queues...')
cliLogger.info('Initializing bull queues...')
const queues = [buildNotificationsQueue(NOTIFICATIONS_QUEUE)]
if (testQueueId) {
cliDebug('Also initializing queue ' + testQueueId + '...')
cliLogger.info('Also initializing queue %s...', testQueueId)
queues.push(buildNotificationsQueue(testQueueId))
}
cliDebug('Initializing monitor...')
cliLogger.info('Initializing monitor...')
const app = express()
const serverAdapter = new ExpressAdapter()
@ -45,8 +45,8 @@ const command: CommandModule<unknown, { testQueueId: string }> = {
app.use(serverAdapter.getRouter())
app.listen(PORT, () => {
cliDebug(`Running on ${PORT}...`)
cliDebug(
cliLogger.info(`Running on ${PORT}...`)
cliLogger.info(
`For the UI, open http://localhost:${PORT}/, and make sure Redis is running`
)
})

Просмотреть файл

@ -1,6 +1,6 @@
import { cliLogger } from '@/logging/logging'
import { NotificationType } from '@/modules/notifications/helpers/types'
import { initializeConsumption } from '@/modules/notifications/index'
import { cliDebug } from '@/modules/shared/utils/logger'
import { get, noop } from 'lodash'
import { CommandModule } from 'yargs'
@ -8,12 +8,12 @@ const command: CommandModule = {
command: 'test-consume',
describe: 'Consume incoming messages inserted through test-push',
handler: async () => {
cliDebug('Starting consumption...')
cliLogger.info('Starting consumption...')
// Overriding handler for test purposes, we don't want the actual mentions logic to run
await initializeConsumption({
[NotificationType.MentionedInComment]: async (msg, { debug, job }) => {
debug('Received test message with payload', msg, job)
[NotificationType.MentionedInComment]: async (msg, { logger, job }) => {
logger.info('Received test message with payload', msg, job)
if (get(msg.data, 'error')) {
throw new Error('Forced to throw error!')

Просмотреть файл

@ -1,10 +1,10 @@
import { cliLogger } from '@/logging/logging'
import {
MentionedInCommentData,
NotificationType
} from '@/modules/notifications/helpers/types'
import { publishNotification } from '@/modules/notifications/services/publication'
import { initializeQueue } from '@/modules/notifications/services/queue'
import { cliDebug } from '@/modules/shared/utils/logger'
import { CommandModule } from 'yargs'
const command: CommandModule = {
@ -36,7 +36,7 @@ const command: CommandModule = {
} as unknown as MentionedInCommentData
})
cliDebug('Queued a fake notification...')
cliLogger.info('Queued a fake notification...')
}
}

Просмотреть файл

@ -1,6 +1,7 @@
const knex = require('@/db/knex')
const { appRoot } = require('@/bootstrap')
const fs = require('fs/promises')
const { logger } = require('@/logging/logging')
/** @type {import('yargs').CommandModule} */
const command = {
@ -30,12 +31,12 @@ const command = {
)
}
console.log('Creating migration...')
logger.info('Creating migration...')
await knex.migrate.make(name, {
directory: migrationDir,
extension: 'ts'
})
console.log('...done')
logger.info('migration is complete')
}
}

Просмотреть файл

@ -1,13 +1,14 @@
const knex = require('@/db/knex')
const { logger } = require('@/logging/logging')
/** @type {import('yargs').CommandModule} */
const command = {
command: 'down',
describe: 'Undo last run migration',
async handler() {
console.log('Undoing last migration...')
logger.info('Undoing last migration...')
await knex.migrate.down()
console.log('...done')
logger.info('Completed undoing last migration')
}
}

Просмотреть файл

@ -1,13 +1,14 @@
const knex = require('@/db/knex')
const { logger } = require('@/logging/logging')
/** @type {import('yargs').CommandModule} */
const command = {
command: 'latest',
describe: 'Run all migrations that have not yet been run',
async handler() {
console.log('Running...')
logger.info('Running latest migration...')
await knex.migrate.latest()
console.log('...done')
logger.info('Completed running migration')
}
}

Просмотреть файл

@ -1,13 +1,14 @@
const knex = require('@/db/knex')
const { logger } = require('@/logging/logging')
/** @type {import('yargs').CommandModule} */
const command = {
command: 'rollback',
describe: 'Roll back all migrations',
async handler() {
console.log('Rolling back...')
logger.info('Rolling back migrations...')
await knex.migrate.rollback(null, true)
console.log('...done')
logger.info('Completed rolling back migrations')
}
}

Просмотреть файл

@ -1,13 +1,14 @@
const knex = require('@/db/knex')
const { logger } = require('@/logging/logging')
/** @type {import('yargs').CommandModule} */
const command = {
command: 'up',
describe: 'Run next migration that has not yet been run',
async handler() {
console.log('Running...')
logger.info('Running next migration...')
await knex.migrate.up()
console.log('...done')
logger.info('Completed running next migration')
}
}

Просмотреть файл

@ -1,6 +1,6 @@
import { cliLogger } from '@/logging/logging'
import { getStream } from '@/modules/core/repositories/streams'
import { getUser } from '@/modules/core/repositories/users'
import { cliDebug } from '@/modules/shared/utils/logger'
import { BasicTestCommit, createTestCommits } from '@/test/speckle-helpers/commitHelper'
import dayjs from 'dayjs'
import { times } from 'lodash'
@ -48,7 +48,7 @@ const command: CommandModule<
)
}
cliDebug(`Generating ${count} objects & commits for stream ${streamId}...`)
cliLogger.info(`Generating ${count} objects & commits for stream ${streamId}...`)
await createTestCommits(
times(
count,
@ -61,7 +61,7 @@ const command: CommandModule<
})
)
)
cliDebug(`...done`)
cliLogger.info(`...done`)
}
}

Просмотреть файл

@ -1,3 +1,4 @@
const { logger } = require('@/logging/logging')
const { Users, ServerAcl } = require('@/modules/core/dbSchema')
const { Roles } = require('@/modules/core/helpers/mainConstants')
const { faker } = require('@faker-js/faker')
@ -45,11 +46,11 @@ async function* batchedOperationGenerator({
batchSize,
retryCount = 3
}) {
console.log('Starting batched operation...')
logger.info('Starting batched operation...')
let currentItemCount = 0
const batchCount = Math.ceil(itemCount / batchSize)
for (let i = 0; i < batchCount; i++) {
console.log(`Processing batch ${i + 1} out of ${batchCount}...`)
logger.info(`Processing batch ${i + 1} out of ${batchCount}...`)
const newItemCount = Math.min(currentItemCount + batchSize, itemCount)
const insertCount = newItemCount - currentItemCount
@ -59,7 +60,7 @@ async function* batchedOperationGenerator({
const execute = () =>
Promise.resolve(batchInsertGenerator(insertCount, currentItemCount))
let batchPromise = execute().then((res) => {
console.log(`...processed batch ${i + 1} out of ${batchCount}`)
logger.info(`...processed batch ${i + 1} out of ${batchCount}`)
return res
})
@ -70,7 +71,7 @@ async function* batchedOperationGenerator({
})
})
batchPromise = batchPromise.catch((e) => {
console.error('Operation failed all retries: ', e)
logger.error('Operation failed all retries: ', e)
})
currentItemCount = newItemCount
@ -110,14 +111,14 @@ const command = {
})
for await (const insertedIds of userBatchedInsertionGenerator) {
console.log('Inserting ACL entries for batch...')
logger.info('Inserting ACL entries for batch...')
const entries = insertedIds.map((id) => ({
role: Roles.Server.User,
userId: id
}))
await ServerAcl.knex().insert(entries)
console.log('...inserted ACL')
logger.info('...inserted ACL')
}
}
}

Просмотреть файл

@ -7,7 +7,6 @@ import {
HttpLink,
ApolloQueryResult
} from '@apollo/client/core'
import { cliDebug } from '@/modules/shared/utils/logger'
import { CommandModule } from 'yargs'
import { getBaseUrl, getServerVersion } from '@/modules/shared/helpers/envHelper'
import { Commit } from '@/test/graphql/generated/graphql'
@ -20,6 +19,7 @@ import { createObject } from '@/modules/core/services/objects'
import { getObject } from '@/modules/core/repositories/objects'
import ObjectLoader from '@speckle/objectloader'
import { noop } from 'lodash'
import { cliLogger } from '@/logging/logging'
type LocalResources = Awaited<ReturnType<typeof getLocalResources>>
type ParsedCommitUrl = ReturnType<typeof parseCommitUrl>
@ -188,7 +188,7 @@ const createNewObject = async (
targetStreamId: string
) => {
if (!newObject) {
cliDebug('Encountered falsy object!')
cliLogger.error('Encountered falsy object!')
return
}
@ -230,7 +230,9 @@ const loadAllObjectsFromParent = async (params: {
let processedObjectCount = 1
for await (const obj of objectLoader.getObjectIterator()) {
const typedObj = obj as ObjectLoaderObject
cliDebug(`Processing ${obj.id} - ${processedObjectCount++}/${totalObjectCount}`)
cliLogger.info(
`Processing ${obj.id} - ${processedObjectCount++}/${totalObjectCount}`
)
await createNewObject(typedObj, targetStreamId)
}
}
@ -259,24 +261,24 @@ const command: CommandModule<
},
handler: async (argv) => {
const { commitUrl, targetStreamId, branchName } = argv
cliDebug(`Process started at: ${new Date().toISOString()}`)
cliLogger.info(`Process started at: ${new Date().toISOString()}`)
const localResources = await getLocalResources(targetStreamId, branchName)
cliDebug(
cliLogger.info(
`Using local branch ${branchName} of stream ${targetStreamId} to dump the incoming commit`
)
const parsedCommitUrl = parseCommitUrl(commitUrl)
cliDebug('Loading the following commit: ', parsedCommitUrl)
cliLogger.info('Loading the following commit: %s', parsedCommitUrl)
const client = await createApolloClient(parsedCommitUrl.origin)
const commit = await getCommitMetadata(client, parsedCommitUrl)
cliDebug('Loaded commit metadata', commit)
cliLogger.info('Loaded commit metadata: %s', commit)
const newCommitId = await saveNewCommit(commit, localResources)
cliDebug(`Created new local commit: ${newCommitId}`)
cliLogger.info(`Created new local commit: ${newCommitId}`)
cliDebug(`Pulling & saving all objects! (${commit.totalChildrenCount})`)
cliLogger.info(`Pulling & saving all objects! (${commit.totalChildrenCount})`)
await loadAllObjectsFromParent({
targetStreamId,
sourceCommit: commit,
@ -284,7 +286,7 @@ const command: CommandModule<
})
const linkToNewCommit = `${getBaseUrl()}/streams/${targetStreamId}/commits/${newCommitId}`
cliDebug(`All done! Find your commit here: ${linkToNewCommit}`)
cliLogger.info(`All done! Find your commit here: ${linkToNewCommit}`)
}
}

Просмотреть файл

@ -1,6 +1,7 @@
const path = require('path')
const yargs = require('yargs')
require('../../bootstrap')
const { logger } = require('@/logging/logging')
const execution = yargs
.scriptName('yarn cli')
@ -10,12 +11,12 @@ const execution = yargs
.fail((msg, err, yargs) => {
if (!err) {
// If validation error (no err instance) then just show help and show the message
console.error(yargs.help())
console.error('\n', msg)
logger.error(yargs.help())
logger.error('\n', msg)
} else {
// If actual app error occurred, show the msg, but don't show help info
console.error(err)
console.error('\n', 'Specify --help for available options')
logger.error(err)
logger.error('\n', 'Specify --help for available options')
}
process.exit(1)

Просмотреть файл

@ -1,12 +1,12 @@
const { moduleLogger } = require('@/logging/logging')
const {
notifyUsersOnCommentEvents
} = require('@/modules/comments/services/notifications')
const debug = require('debug')
let unsubFromEvents
exports.init = async (_, isInitial) => {
debug('speckle:modules')('🗣 Init comments module')
moduleLogger.info('🗣 Init comments module')
if (isInitial) {
unsubFromEvents = await notifyUsersOnCommentEvents()

Просмотреть файл

@ -1,9 +1,9 @@
'use strict'
const debug = require('debug')
const { registerOrUpdateScope, registerOrUpdateRole } = require('@/modules/shared')
const { moduleLogger } = require('@/logging/logging')
exports.init = async (app) => {
debug('speckle:modules')('💥 Init core module')
moduleLogger.info('💥 Init core module')
// Initialize the static route
require('./rest/static')(app)

Просмотреть файл

@ -1,6 +1,5 @@
'use strict'
const zlib = require('zlib')
const debug = require('debug')
const cors = require('cors')
const { validatePermissionsReadStream } = require('./authUtils')
@ -8,6 +7,7 @@ const { SpeckleObjectsStream } = require('./speckleObjectsStream')
const { getObjectsStream } = require('../services/objects')
const { pipeline, PassThrough } = require('stream')
const { logger } = require('@/logging/logging')
module.exports = (app) => {
app.options('/api/getobjects/:streamId', cors())
@ -40,13 +40,13 @@ module.exports = (app) => {
res,
(err) => {
if (err) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] App error streaming objects from stream ${req.params.streamId}: ${err}`
)
} else {
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Streamed ${
childrenList.length
} objects from stream ${req.params.streamId} (size: ${
@ -73,7 +73,7 @@ module.exports = (app) => {
})
}
} catch (ex) {
debug('speckle:error')(
logger.error(
`[User ${req.context.userId || '-'}] DB Error streaming objects from stream ${
req.params.streamId
}: ${ex}`

Просмотреть файл

@ -1,11 +1,11 @@
'use strict'
const zlib = require('zlib')
const cors = require('cors')
const debug = require('debug')
const { validatePermissionsWriteStream } = require('./authUtils')
const { hasObjects } = require('../services/objects')
const { logger } = require('@/logging/logging')
module.exports = (app) => {
app.options('/api/diff/:streamId', cors())
@ -21,7 +21,7 @@ module.exports = (app) => {
const objectList = JSON.parse(req.body.objects)
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Diffing ${
objectList.length
} objects for stream ${req.params.streamId}`
@ -31,7 +31,7 @@ module.exports = (app) => {
streamId: req.params.streamId,
objectIds: objectList
})
// console.log(response)
// logger.debug(response)
res.writeHead(200, {
'Content-Encoding': 'gzip',
'Content-Type': 'application/json'

Просмотреть файл

@ -1,6 +1,5 @@
'use strict'
const zlib = require('zlib')
const debug = require('debug')
const cors = require('cors')
const { validatePermissionsReadStream } = require('./authUtils')
@ -8,7 +7,7 @@ const { validatePermissionsReadStream } = require('./authUtils')
const { getObject, getObjectChildrenStream } = require('../services/objects')
const { SpeckleObjectsStream } = require('./speckleObjectsStream')
const { pipeline, PassThrough } = require('stream')
const { logger } = require('@/logging/logging')
module.exports = (app) => {
app.options('/objects/:streamId/:objectId', cors())
@ -55,13 +54,13 @@ module.exports = (app) => {
res,
(err) => {
if (err) {
debug('speckle:error')(
logger.error(
`[User ${req.context.userId || '-'}] Error downloading object ${
req.params.objectId
} from stream ${req.params.streamId}: ${err}`
)
} else {
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Downloaded object ${
req.params.objectId
} from stream ${req.params.streamId} (size: ${
@ -92,7 +91,7 @@ module.exports = (app) => {
return res.status(404).send('Failed to find object.')
}
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Downloaded single object ${
req.params.objectId
} from stream ${req.params.streamId}`

Просмотреть файл

@ -2,11 +2,11 @@
const zlib = require('zlib')
const cors = require('cors')
const Busboy = require('busboy')
const debug = require('debug')
const { validatePermissionsWriteStream } = require('./authUtils')
const { createObjectsBatched } = require('../services/objects')
const { logger, uploadEndpointLogger } = require('@/logging/logging')
const MAX_FILE_SIZE = 50 * 1024 * 1024
@ -48,7 +48,7 @@ module.exports = (app) => {
const gzippedBuffer = Buffer.concat(buffer)
if (gzippedBuffer.length > MAX_FILE_SIZE) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] Upload error: Batch size too large (${
@ -66,7 +66,7 @@ module.exports = (app) => {
const gunzippedBuffer = zlib.gunzipSync(gzippedBuffer).toString()
if (gunzippedBuffer.length > MAX_FILE_SIZE) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] Upload error: Batch size too large (${
@ -85,7 +85,7 @@ module.exports = (app) => {
try {
objs = JSON.parse(gunzippedBuffer)
} catch (e) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] Upload error: Batch not in JSON format`
@ -104,7 +104,7 @@ module.exports = (app) => {
}
const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => {
debug('speckle:error')(
logger.error(
`[User ${req.context.userId || '-'}] Upload error: ${e.message}`
)
if (!requestDropped)
@ -119,7 +119,7 @@ module.exports = (app) => {
await promise
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Uploaded batch of ${
objs.length
} objects to stream ${req.params.streamId} (size: ${
@ -146,7 +146,7 @@ module.exports = (app) => {
let objs = []
if (buffer.length > MAX_FILE_SIZE) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] Upload error: Batch size too large (${
@ -163,7 +163,7 @@ module.exports = (app) => {
try {
objs = JSON.parse(buffer)
} catch (e) {
debug('speckle:error')(
logger.error(
`[User ${
req.context.userId || '-'
}] Upload error: Batch not in JSON format`
@ -181,7 +181,7 @@ module.exports = (app) => {
}
const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => {
debug('speckle:error')(
logger.error(
`[User ${req.context.userId || '-'}] Upload error: ${e.message}`
)
if (!requestDropped)
@ -195,7 +195,7 @@ module.exports = (app) => {
promises.push(promise)
await promise
debug('speckle:info')(
logger.info(
`[User ${req.context.userId || '-'}] Uploaded batch of ${
objs.length
} objects to stream ${req.params.streamId} (size: ${
@ -206,7 +206,7 @@ module.exports = (app) => {
)
})
} else {
debug('speckle:error')(
logger.error(
`[User ${req.context.userId || '-'}] Invalid ContentType header: ${mimeType}`
)
if (!requestDropped)
@ -222,7 +222,7 @@ module.exports = (app) => {
busboy.on('finish', async () => {
if (requestDropped) return
debug('speckle:upload-endpoint')(
uploadEndpointLogger.info(
`[User ${req.context.userId || '-'}] Upload finished: ${totalProcessed} objs, ${
process.memoryUsage().heapUsed / 1024 / 1024
} MB mem`
@ -238,7 +238,7 @@ module.exports = (app) => {
})
busboy.on('error', async (err) => {
debug('speckle:upload-endpoint')(
uploadEndpointLogger.info(
`[User ${req.context.userId || '-'}] Upload error: ${err}`
)
if (!requestDropped)

Просмотреть файл

@ -3,9 +3,8 @@ const { performance } = require('perf_hooks')
const crypto = require('crypto')
const { set, get, chunk } = require('lodash')
const debug = require('debug')('speckle:services')
const knex = require(`@/db/knex`)
const { servicesLogger } = require('@/logging/logging')
const Objects = () => knex('objects')
const Closures = () => knex('object_children_closure')
@ -103,7 +102,7 @@ module.exports = {
const q = Objects().insert(batch).toString() + ' on conflict do nothing'
await trx.raw(q)
})
debug(`Inserted ${batch.length} objects`)
servicesLogger.info(`Inserted ${batch.length} objects`)
}
}
@ -117,7 +116,7 @@ module.exports = {
const q = Closures().insert(batch).toString() + ' on conflict do nothing'
await trx.raw(q)
})
debug(`Inserted ${batch.length} closures`)
servicesLogger.info(`Inserted ${batch.length} closures`)
}
}
return true
@ -190,12 +189,12 @@ module.exports = {
}
const t1 = performance.now()
debug(
servicesLogger.info(
`Batch ${index + 1}/${batches.length}: Stored ${
closures.length + objsToInsert.length
} objects in ${t1 - t0}ms.`
)
// console.log( `Batch ${index + 1}/${batches.length}: Stored ${closures.length + objsToInsert.length} objects in ${t1-t0}ms.` )
// logger.debug( `Batch ${index + 1}/${batches.length}: Stored ${closures.length + objsToInsert.length} objects in ${t1-t0}ms.` )
}
const promises = batches.map((batch, index) => insertBatch(batch, index))
@ -433,7 +432,7 @@ module.exports = {
// Set cursor clause, if present. If it's not present, it's an entry query; this method will return a cursor based on its given query.
// We have implemented keyset pagination for more efficient searches on larger sets. This approach depends on an order by value provided by the user and a (hidden) primary key.
// console.log( cursor )
// logger.debug( cursor )
if (cursor) {
let castType = 'text'
if (typeof cursor.value === 'string') castType = 'text'
@ -489,7 +488,7 @@ module.exports = {
}
mainQuery.limit(limit)
// console.log( mainQuery.toString() )
// logger.debug( mainQuery.toString() )
// Finally, execute the query
const rows = await mainQuery
const totalCount = rows && rows.length > 0 ? parseInt(rows[0].total_count) : 0

Просмотреть файл

@ -1,7 +1,6 @@
'use strict'
const _ = require('lodash')
const crs = require('crypto-random-string')
const debug = require('debug')
const { createBranch } = require('@/modules/core/services/branches')
const { Streams, StreamAcl, knex } = require('@/modules/core/dbSchema')
@ -18,6 +17,7 @@ const {
inviteUsersToStream
} = require('@/modules/serverinvites/services/inviteCreationService')
const { omitBy, isNull, isUndefined, has } = require('lodash')
const { dbLogger } = require('@/logging/logging')
module.exports = {
/**
@ -165,7 +165,7 @@ module.exports = {
},
async deleteStream({ streamId }) {
debug('speckle:db')('Deleting stream ' + streamId)
dbLogger.info('Deleting stream %s', streamId)
// Delete stream commits (not automatically cascaded)
await knex.raw(

Просмотреть файл

@ -1,11 +1,9 @@
import cron from 'node-cron'
import { InvalidArgumentError } from '@/modules/shared/errors'
import { modulesDebug, errorDebug } from '@/modules/shared/utils/logger'
import { ensureError } from '@/modules/shared/helpers/errorHelper'
import { acquireTaskLock } from '@/modules/core/repositories/scheduledTasks'
import { ScheduledTaskRecord } from '@/modules/core/helpers/types'
const activitiesDebug = modulesDebug.extend('activities')
import { activitiesLogger, logger } from '@/logging/logging'
export const scheduledCallbackWrapper = async (
scheduledTime: Date,
@ -23,24 +21,26 @@ export const scheduledCallbackWrapper = async (
// if couldn't acquire it, stop execution
if (!lock) {
activitiesDebug(
activitiesLogger.warn(
`Could not acquire task lock for ${taskName}, stopping execution.`
)
return null
}
// else continue executing the callback...
activitiesDebug(`Executing scheduled function ${taskName} at ${scheduledTime}`)
activitiesLogger.info(
`Executing scheduled function ${taskName} at ${scheduledTime}`
)
await callback(scheduledTime)
// update lock as succeeded
const finishDate = new Date()
activitiesDebug(
activitiesLogger.info(
`Finished scheduled function ${taskName} execution in ${
(finishDate.getTime() - scheduledTime.getTime()) / 1000
} seconds`
)
} catch (error) {
errorDebug(
logger.error(
`The triggered task execution ${taskName} failed at ${scheduledTime}, with error ${
ensureError(error, 'unknown reason').message
}`

Просмотреть файл

@ -10,11 +10,11 @@ const {
const Users = () => UsersSchema.knex()
const Acl = () => ServerAclSchema.knex()
const debug = require('debug')
const { deleteStream } = require('./streams')
const { LIMITED_USER_FIELDS } = require('@/modules/core/helpers/userHelper')
const { deleteAllUserInvites } = require('@/modules/serverinvites/repositories')
const { UsersEmitter, UsersEvents } = require('@/modules/core/events/usersEmitter')
const { dbLogger } = require('@/logging/logging')
const changeUserRole = async ({ userId, role }) =>
await Acl().where({ userId }).update({ role })
@ -176,7 +176,7 @@ module.exports = {
async deleteUser(id) {
//TODO: check for the last admin user to survive
debug('speckle:db')('Deleting user ' + id)
dbLogger.info('Deleting user ' + id)
await _ensureAtleastOneAdminRemains(id)
const streams = await knex.raw(
`

Просмотреть файл

@ -77,7 +77,7 @@ describe('GraphQL API Subscriptions @gql-subscriptions', () => {
)
const reg = /running at 0.0.0.0:([0-9]*)/
serverProcess.stderr.on('data', (data) => {
serverProcess.stdout.on('data', (data) => {
// uncomment this line to understand a bit more what's happening...
// console.error( `stderr: ${data}` )
// ok this is going to be a dirt hack, but I have no better idea ATM

Просмотреть файл

@ -1,17 +1,15 @@
/* istanbul ignore file */
import { moduleLogger } from '@/logging/logging'
import * as SendingService from '@/modules/emails/services/sending'
import { initializeVerificationOnRegistration } from '@/modules/emails/services/verification/request'
import { initializeTransporter } from '@/modules/emails/utils/transporter'
import { Optional, SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import dbg from 'debug'
const debug = dbg('speckle')
const modulesDebug = debug.extend('modules')
let quitVerificationListeners: Optional<() => void> = undefined
const emailsModule: SpeckleModule = {
init: async (app, isInitial) => {
modulesDebug('📧 Init emails module')
moduleLogger.info('📧 Init emails module')
// init transporter
await initializeTransporter()

Просмотреть файл

@ -1,8 +1,5 @@
import { logger } from '@/logging/logging'
import { getTransporter } from '@/modules/emails/utils/transporter'
import dbg from 'debug'
const debug = dbg('speckle')
const errorDebug = debug.extend('errors')
export type SendEmailParams = {
from?: string
@ -24,7 +21,7 @@ export async function sendEmail({
}: SendEmailParams): Promise<boolean> {
const transporter = getTransporter()
if (!transporter) {
errorDebug('No email transport present. Cannot send emails.')
logger.error('No email transport present. Cannot send emails.')
return false
}
try {
@ -37,7 +34,7 @@ export async function sendEmail({
html
})
} catch (error) {
errorDebug(error)
logger.error(error)
}
return false

Просмотреть файл

@ -1,10 +1,6 @@
import debug from 'debug'
import { logger, moduleLogger } from '@/logging/logging'
import { createTransport, Transporter } from 'nodemailer'
const modulesDebug = debug('speckle').extend('modules')
const errorDebug = debug('speckle').extend('errors')
let transporter: Transporter | undefined = undefined
const createJsonEchoTransporter = () => createTransport({ jsonTransport: true })
@ -26,7 +22,7 @@ const initSmtpTransporter = async () => {
await smtpTransporter.verify()
return smtpTransporter
} catch (e) {
errorDebug('📧 Email provider is misconfigured, check config variables.', e)
logger.error('📧 Email provider is misconfigured, check config variables.', e)
}
}
@ -37,7 +33,7 @@ export async function initializeTransporter(): Promise<Transporter | undefined>
if (process.env.EMAIL === 'true') newTransporter = await initSmtpTransporter()
if (!newTransporter) {
modulesDebug(
moduleLogger.warn(
'📧 Email provider is not configured. Server functionality will be limited.'
)
}

Просмотреть файл

@ -1,11 +1,11 @@
/* istanbul ignore file */
'use strict'
const debug = require('debug')
const { saveUploadFile } = require('./services/fileuploads')
const request = require('request')
const { streamWritePermissions } = require('@/modules/shared/authz')
const { authMiddlewareCreator } = require('@/modules/shared/middleware')
const { moduleLogger, logger } = require('@/logging/logging')
const saveFileUploads = async ({ userId, streamId, branchName, uploadResults }) => {
await Promise.all(
@ -25,10 +25,10 @@ const saveFileUploads = async ({ userId, streamId, branchName, uploadResults })
exports.init = async (app) => {
if (process.env.DISABLE_FILE_UPLOADS) {
debug('speckle:modules')('📄 FileUploads module is DISABLED')
moduleLogger.warn('📄 FileUploads module is DISABLED')
return
} else {
debug('speckle:modules')('📄 Init FileUploads module')
moduleLogger.info('📄 Init FileUploads module')
}
app.post(
@ -40,7 +40,7 @@ exports.init = async (app) => {
`${process.env.CANONICAL_URL}/api/stream/${req.params.streamId}/blob`,
async (err, response, body) => {
if (err) {
debug('speckle:error')(err.message)
logger.error(err)
res.status(500).send(err.message)
return
}

Просмотреть файл

@ -5,8 +5,8 @@ const { appRoot, packageRoot } = require('@/bootstrap')
const { values, merge, camelCase } = require('lodash')
const baseTypeDefs = require('@/modules/core/graph/schema/baseTypeDefs')
const { scalarResolvers } = require('./core/graph/scalars')
const { modulesDebug } = require('@/modules/shared/utils/logger')
const { makeExecutableSchema } = require('@graphql-tools/schema')
const { moduleLogger } = require('@/logging/logging')
/**
* Cached speckle module requires
@ -84,13 +84,13 @@ exports.init = async (app) => {
}
exports.shutdown = async () => {
modulesDebug('Triggering module shutdown...')
moduleLogger.info('Triggering module shutdown...')
const modules = await getSpeckleModules()
for (const module of modules) {
await module.shutdown?.()
}
modulesDebug('...module shutdown finished')
moduleLogger.info('...module shutdown finished')
}
/**

Просмотреть файл

@ -2,8 +2,8 @@
import { StreamAccessRequestRecord } from '@/modules/accessrequests/repositories'
import { MaybeAsync, Optional } from '@/modules/shared/helpers/typeHelper'
import { Job } from 'bull'
import debug from 'debug'
import { isObject, has } from 'lodash'
import { Logger } from 'pino'
export enum NotificationType {
ActivityDigest = 'activityDigest',
@ -41,7 +41,7 @@ export type NotificationHandler<M extends NotificationMessage = NotificationMess
msg: M,
extra: {
job: Job
debug: debug.Debugger
logger: Logger
}
) => MaybeAsync<void>

Просмотреть файл

@ -8,14 +8,14 @@ import {
NotificationType,
NotificationTypeHandlers
} from '@/modules/notifications/helpers/types'
import { modulesDebug } from '@/modules/shared/utils/logger'
import { SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import { shouldDisableNotificationsConsumption } from '@/modules/shared/helpers/envHelper'
import { moduleLogger } from '@/logging/logging'
export async function initializeConsumption(
customHandlers?: Partial<NotificationTypeHandlers>
) {
modulesDebug('📞 Initializing notification queue consumption...')
moduleLogger.info('📞 Initializing notification queue consumption...')
const allHandlers: Partial<NotificationTypeHandlers> = {
[NotificationType.MentionedInComment]: (
@ -39,14 +39,14 @@ export async function initializeConsumption(
initializeQueue()
if (shouldDisableNotificationsConsumption()) {
modulesDebug('Skipping notification consumption...')
moduleLogger.info('Skipping notification consumption...')
} else {
await consumeIncomingNotifications()
}
}
export const init: SpeckleModule['init'] = async (_, isInitial) => {
modulesDebug('📞 Init notifications module')
moduleLogger.info('📞 Init notifications module')
if (isInitial) {
await initializeConsumption()
}

Просмотреть файл

@ -12,11 +12,11 @@ import {
NotificationType,
NotificationTypeHandlers
} from '@/modules/notifications/helpers/types'
import { notificationsDebug } from '@/modules/shared/utils/logger'
import { isProdEnv, isTestEnv } from '@/modules/shared/helpers/envHelper'
import Bull from 'bull'
import { buildBaseQueueOptions } from '@/modules/shared/helpers/bullHelper'
import cryptoRandomString from 'crypto-random-string'
import { logger, notificationsLogger, Observability } from '@/logging/logging'
export type NotificationJobResult = {
status: NotificationJobResultsStatus
@ -39,8 +39,8 @@ export const NOTIFICATIONS_QUEUE = isTestEnv()
: NOTIFICATIONS_QUEUE_MAIN_BASE
if (isTestEnv()) {
console.log('Notifications test queue ID: ' + NOTIFICATIONS_QUEUE)
console.log(`Monitor using: 'yarn cli bull monitor ${NOTIFICATIONS_QUEUE}'`)
logger.info('Notifications test queue ID: ' + NOTIFICATIONS_QUEUE)
logger.info(`Monitor using: 'yarn cli bull monitor ${NOTIFICATIONS_QUEUE}'`)
}
let queue: Optional<Bull.Queue>
@ -117,7 +117,7 @@ export async function consumeIncomingNotifications() {
queue.process(async (job): Promise<NotificationJobResult> => {
let notificationType: Optional<NotificationType>
try {
notificationsDebug('New notification received...')
notificationsLogger.info('New notification received...')
// Parse
const payload = job.data
@ -139,17 +139,20 @@ export async function consumeIncomingNotifications() {
throw new UnhandledNotificationError(null, { info: { payload, type } })
}
const notificationDebug = notificationsDebug.extend(type)
notificationDebug('Starting processing notification...')
await Promise.resolve(handler(typedPayload, { job, debug: notificationDebug }))
notificationDebug('...successfully processed notification')
const notificationLogger = Observability.extendLoggerComponent(
notificationsLogger,
type
)
notificationLogger.info('Starting processing notification...')
await Promise.resolve(handler(typedPayload, { job, logger: notificationLogger }))
notificationLogger.info('...successfully processed notification')
return {
status: NotificationJobResultsStatus.Success,
type
}
} catch (e: unknown) {
notificationsDebug(e)
notificationsLogger.error(e)
const err =
e instanceof Error ? e : new Error('Unexpected notification consumption error')

Просмотреть файл

@ -1,8 +1,6 @@
/* istanbul ignore file */
'use strict'
const debug = require('debug')
const { validateScopes, authorizeResolver } = require('@/modules/shared')
const { getStream } = require('../core/services/streams')
@ -19,6 +17,7 @@ const {
} = require('./services/previews')
const { makeOgImage } = require('./ogImage')
const { moduleLogger, logger } = require('@/logging/logging')
const httpErrorImage = (httpErrorCode) =>
require.resolve(`#/assets/previews/images/preview_${httpErrorCode}.png`)
@ -28,9 +27,9 @@ const previewErrorImage = require.resolve('#/assets/previews/images/preview_erro
exports.init = (app) => {
if (process.env.DISABLE_PREVIEWS) {
debug('speckle:modules')('📸 Object preview module is DISABLED')
moduleLogger.warn('📸 Object preview module is DISABLED')
} else {
debug('speckle:modules')('📸 Init object preview module')
moduleLogger.info('📸 Init object preview module')
}
const DEFAULT_ANGLE = '0'
@ -64,7 +63,7 @@ exports.init = (app) => {
const previewImgId = previewInfo.preview[angle]
if (!previewImgId) {
debug('speckle:error')(
logger.error(
`Error: Preview angle '${angle}' not found for object ${streamId}:${objectId}`
)
return {
@ -75,7 +74,7 @@ exports.init = (app) => {
}
const previewImg = await getPreviewImage({ previewId: previewImgId })
if (!previewImg) {
debug('speckle:error')(`Error: Preview image not found: ${previewImgId}`)
logger.error(`Error: Preview image not found: ${previewImgId}`)
return {
type: 'file',
file: previewErrorImage

Просмотреть файл

@ -4,6 +4,7 @@
const sharp = require('sharp')
const xmlescape = require('xml-escape')
const pixelWidth = require('string-pixel-width')
// const { logger } = require('@/logging/logging')
module.exports = {
async makeOgImage(previewBufferOrFilename, streamName) {
@ -22,7 +23,7 @@ module.exports = {
}
title += '...'
}
// console.log( streamName, pixelWidth( title, { font: 'open sans', size: 48 } ), ' / ', imgWidth - 2 * panelPadding - 305 )
// logger.debug( streamName, pixelWidth( title, { font: 'open sans', size: 48 } ), ' / ', imgWidth - 2 * panelPadding - 305 )
const logo = await sharp(
require.resolve('#/assets/previews/images/speckle_logo_and_text.png')

Просмотреть файл

@ -1,10 +1,10 @@
import { moduleLogger } from '@/logging/logging'
import RestSetup from '@/modules/pwdreset/rest'
import { SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import debug from 'debug'
import { noop } from 'lodash'
export const init: SpeckleModule['init'] = (app) => {
debug('speckle:modules')('♻️ Init pwd reset module')
moduleLogger.info('♻️ Init pwd reset module')
RestSetup(app)
}

Просмотреть файл

@ -1,9 +1,9 @@
'use strict'
const debug = require('debug')
const { registerOrUpdateScope } = require('@/modules/shared')
const { moduleLogger } = require('@/logging/logging')
exports.init = async () => {
debug('speckle:modules')('💌 Init invites module')
moduleLogger.info('💌 Init invites module')
const scopes = require('./scopes.js')
for (const scope of scopes) {

Просмотреть файл

@ -1,5 +1,5 @@
import { moduleLogger, Observability } from '@/logging/logging'
import { MaybeAsync } from '@/modules/shared/helpers/typeHelper'
import { modulesDebug } from '@/modules/shared/utils/logger'
import EventEmitter from 'eventemitter2'
export type ModuleEventEmitterParams = {
@ -26,10 +26,10 @@ export function initializeModuleEventEmitter<P extends Record<string, unknown>>(
const { moduleName, namespace } = params
const identifier = namespace ? `${moduleName}-${namespace}` : moduleName
const debug = modulesDebug.extend(identifier).extend('events')
const logger = Observability.extendLoggerComponent(moduleLogger, identifier, 'events')
const errHandler = (e: unknown) => {
debug(`Unhandled ${identifier} event emitter error`, e)
logger.error(`Unhandled ${identifier} event emitter error`, e)
}
const emitter = new EventEmitter()
@ -76,6 +76,6 @@ export function initializeModuleEventEmitter<P extends Record<string, unknown>>(
/**
* Debugger scoped to this module event emitter
*/
debug
logger
}
}

Просмотреть файл

@ -1,8 +0,0 @@
import dbg from 'debug'
const debug = dbg('speckle')
export const modulesDebug = debug.extend('modules')
export const notificationsDebug = debug.extend('notifications')
export const cliDebug = debug.extend('cli')
export const errorDebug = debug.extend('error')

Просмотреть файл

@ -1,8 +1,8 @@
'use strict'
const debug = require('debug')
const { moduleLogger } = require('@/logging/logging')
exports.init = async () => {
debug('speckle:modules')('📊 Init stats module')
moduleLogger.info('📊 Init stats module')
// TODO
}

Просмотреть файл

@ -1,24 +1,22 @@
import cron from 'node-cron'
import { SpeckleModule } from '@/modules/shared/helpers/typeHelper'
import { modulesDebug } from '@/modules/shared/utils/logger'
import { scheduleExecution } from '@/modules/core/services/taskScheduler'
import { cleanOrphanedWebhookConfigs } from '@/modules/webhooks/services/cleanup'
const webhooksDebug = modulesDebug.extend('activities')
import { activitiesLogger, moduleLogger } from '@/logging/logging'
const scheduleWebhookCleanup = () => {
const cronExpression = '0 4 * * 1'
return scheduleExecution(cronExpression, 'weeklyWebhookCleanup', async () => {
webhooksDebug('Starting weekly webhooks cleanup')
activitiesLogger.info('Starting weekly webhooks cleanup')
await cleanOrphanedWebhookConfigs()
webhooksDebug('Finished cleanup')
activitiesLogger.info('Finished cleanup')
})
}
let scheduledTask: cron.ScheduledTask | null = null
export const init: SpeckleModule['init'] = () => {
modulesDebug('🎣 Init webhooks module')
moduleLogger.info('🎣 Init webhooks module')
scheduledTask = scheduleWebhookCleanup()
}

Просмотреть файл

@ -17,17 +17,17 @@
"scripts": {
"build": "tsc -p ./tsconfig.build.json",
"build:watch": "tsc -p ./tsconfig.build.json -w",
"run:watch": "cross-env NODE_ENV=development DEBUG='speckle:*' nodemon ./bin/www --watch ./dist --watch ./assets --watch ./bin/www -e js,ts,graphql,env,gql",
"run:watch": "cross-env NODE_ENV=development nodemon ./bin/www --watch ./dist --watch ./assets --watch ./bin/www -e js,ts,graphql,env,gql",
"dev": "concurrently -r \"npm:build:watch\" \"npm:run:watch\"",
"dev:server:test": "cross-env DISABLE_NOTIFICATIONS_CONSUMPTION=true NODE_ENV=test DEBUG='speckle:*' node ./bin/ts-www",
"dev:server:test": "cross-env DISABLE_NOTIFICATIONS_CONSUMPTION=true NODE_ENV=test node ./bin/ts-www",
"test": "cross-env NODE_ENV=test mocha",
"test:coverage": "cross-env NODE_ENV=test nyc --reporter lcov mocha",
"test:report": "yarn test:coverage -- --reporter mocha-junit-reporter --reporter-options mochaFile=reports/test-results.xml",
"lint": "eslint . --ext .js,.ts",
"lint:tsc": "tsc --noEmit",
"cli": "cross-env NODE_ENV=development DEBUG='speckle:*' ts-node ./modules/cli/index.js",
"cli": "cross-env NODE_ENV=development ts-node ./modules/cli/index.js",
"migrate": "yarn cli db migrate",
"migrate:test": "cross-env NODE_ENV=test DEBUG='speckle:*' ts-node ./modules/cli/index.js db migrate",
"migrate:test": "cross-env NODE_ENV=test ts-node ./modules/cli/index.js db migrate",
"gqlgen": "graphql-codegen --config codegen.yml"
},
"dependencies": {
@ -38,6 +38,7 @@
"@sentry/node": "^6.17.9",
"@sentry/tracing": "^6.17.9",
"@speckle/shared": "workspace:^",
"@types/pino-http": "^5.8.1",
"apollo-server-express": "^3.10.2",
"bcrypt": "^5.0.0",
"bull": "^4.8.5",
@ -48,7 +49,6 @@
"crypto-random-string": "^3.2.0",
"dataloader": "^2.0.0",
"dayjs": "^1.11.5",
"debug": "^4.3.1",
"dotenv": "^8.2.0",
"ejs": "^3.1.8",
"eventemitter2": "^6.4.7",
@ -64,8 +64,6 @@
"lodash": "^4.17.21",
"mjml": "^4.13.0",
"module-alias": "^2.2.2",
"morgan": "^1.10.0",
"morgan-debug": "^2.0.0",
"node-cron": "^3.0.2",
"node-machine-id": "^1.1.12",
"nodemailer": "^6.5.0",
@ -76,6 +74,8 @@
"passport-google-oauth20": "^2.0.0",
"pg": "^8.7.3",
"pg-query-stream": "^4.2.3",
"pino": "^8.7.0",
"pino-http": "^8.2.1",
"prom-client": "^14.0.1",
"rate-limiter-flexible": "^2.4.1",
"redis": "^3.1.1",

Просмотреть файл

@ -1,4 +1,5 @@
const knex = require('@/db/knex')
const { logger } = require('@/logging/logging')
const roles = require('@/modules/core/roles.js')
const Users = () => knex('users')
@ -24,10 +25,10 @@ const migrateColumnValue = async (tableName, columnName, oldUser, newUser) => {
const query = knex(tableName)
.where({ [columnName]: oldUser.id })
.update({ [columnName]: newUser.id })
console.log(`${query}`)
logger.info(`${query}`)
await query
} catch (err) {
console.log(err)
logger.error(err)
}
}
@ -106,7 +107,7 @@ const getDuplicateUsers = async () => {
const runMigrations = async () => {
const duplicateUsers = await getDuplicateUsers()
console.log(duplicateUsers)
logger.info(duplicateUsers)
await Promise.all(
duplicateUsers.map(async (userDouble) => {
const migrations = createMigrations(userDouble)
@ -125,7 +126,7 @@ const runMigrations = async () => {
// await createData()
await runMigrations()
} catch (err) {
console.log(err)
logger.error(err)
} finally {
process.exit()
}

Просмотреть файл

@ -1,4 +1,5 @@
require('../bootstrap')
const { logger } = require('@/logging/logging')
const { createUser } = require('@/modules/core/services/users')
const axios = require('axios').default
@ -15,4 +16,4 @@ const main = async () => {
await Promise.all(userInputs.map((userInput) => createUser(userInput)))
}
main().then(console.log('created')).catch(console.log('failed'))
// Wrap the log calls in handler functions: passing `logger.info('created')`
// directly invokes it immediately and hands `undefined` to .then()/.catch(),
// so the success message fires before main() settles and failures are never
// logged (leaving the rejection unhandled).
main()
  .then(() => logger.info('created'))
  .catch((err) => logger.error(err, 'failed'))

Просмотреть файл

@ -8,6 +8,7 @@ const { fetch } = require('undici')
const { init } = require(`@/app`)
const request = require('supertest')
const { exit } = require('yargs')
const { logger } = require('@/logging/logging')
const main = async () => {
const testStream = {
@ -53,7 +54,7 @@ const main = async () => {
.set('Content-type', 'multipart/form-data')
.attach('batch1', Buffer.from(JSON.stringify(objBatch), 'utf8'))
console.log(uploadRes.status)
logger.info(uploadRes.status)
const objectIds = objBatch.map((obj) => obj.id)
const res = await fetch(`http://localhost:3000/api/getobjects/${testStream.id}`, {
@ -66,8 +67,8 @@ const main = async () => {
body: JSON.stringify({ objects: JSON.stringify(objectIds) })
})
const data = await res.body.getReader().read()
console.log(data)
logger.info(data)
exit(0)
}
main().then(console.log('created')).catch(console.log('failed'))
// Wrap the log calls in handler functions: passing `logger.info('created')`
// directly invokes it immediately and hands `undefined` to .then()/.catch(),
// so the success message fires before main() settles and failures are never
// logged (leaving the rejection unhandled).
main()
  .then(() => logger.info('created'))
  .catch((err) => logger.error(err, 'failed'))

Просмотреть файл

@ -1,4 +1,5 @@
/* istanbul ignore file */
// const { logger } = require('@/logging/logging')
const crypto = require('crypto')
function generateManyObjects(shitTon, noise) {
@ -68,7 +69,7 @@ exports.generateManyObjects = generateManyObjects
exports.getAnIdForThisOnePlease = getAnIdForThisOnePlease
exports.sleep = (ms) => {
// console.log( `\t Sleeping ${ms}ms ` )
// logger.debug( `\t Sleeping ${ms}ms ` )
return new Promise((resolve) => {
setTimeout(resolve, ms)
})

Просмотреть файл

@ -9,6 +9,7 @@ const deepEqualInAnyOrder = require('deep-equal-in-any-order')
const knex = require(`@/db/knex`)
const { init, startHttp, shutdown } = require(`@/app`)
const { default: graphqlChaiPlugin } = require('@/test/plugins/graphql')
const { logger } = require('@/logging/logging')
// Register chai plugins
chai.use(chaiHttp)
@ -72,14 +73,14 @@ const initializeTestServer = async (server, app) => {
exports.mochaHooks = {
beforeAll: async () => {
console.log('running before all')
logger.info('running before all')
await unlock()
await knex.migrate.rollback()
await knex.migrate.latest()
await init()
},
afterAll: async () => {
console.log('running after all')
logger.info('running after all')
await unlock()
await shutdown()
}

Просмотреть файл

@ -2,6 +2,7 @@ import { getQueue, NotificationJobResult } from '@/modules/notifications/service
import { EventEmitter } from 'events'
import { CompletedEventCallback, FailedEventCallback, JobId } from 'bull'
import { pick } from 'lodash'
import { logger } from '@/logging/logging'
type AckEvent = {
result?: NotificationJobResult
@ -142,13 +143,13 @@ export async function debugJobs() {
{ items: failed, display: 'Failed' }
]
console.log('------------- START debugJobs() --------------')
logger.debug('------------- START debugJobs() --------------')
for (const { items, display } of jobCollections) {
console.log(`${display}: ` + waiting.length)
console.log(`${display} jobs: `)
logger.debug(`${display}: ` + waiting.length)
logger.debug(`${display} jobs: `)
for (const job of items) {
console.log(
logger.debug(
` - ${JSON.stringify(
pick(job, [
'timestamp',
@ -162,6 +163,6 @@ export async function debugJobs() {
)
}
}
console.log({ workers })
console.log('------------- END debugJobs() --------------')
logger.debug({ workers })
logger.debug('------------- END debugJobs() --------------')
}

Просмотреть файл

@ -1,9 +0,0 @@
declare module 'morgan-debug' {
declare const func: (
namespace: string,
format: string,
options: Record<string, unknown>
) => import('express').Handler
export default func
}

Просмотреть файл

@ -20,6 +20,7 @@
"lint:tsc": "tsc --noEmit",
"lint": "yarn lint:eslint && yarn lint:tsc"
},
"sideEffects": false,
"engines": {
"node": ">=14.0.0"
},
@ -31,7 +32,8 @@
"3d"
],
"dependencies": {
"lodash": "^4.17.0"
"lodash": "^4.17.0",
"pino": "^8.7.0"
},
"peerDependencies": {
"@tiptap/core": "^2.0.0-beta.176"

Просмотреть файл

@ -1,2 +1,3 @@
export * as RichTextEditor from './rich-text-editor'
export * as Observability from './observability'
export * from './core'

Просмотреть файл

@ -0,0 +1,30 @@
import pino from 'pino'

// Process-wide singleton; lazily constructed on the first getLogger() call.
let cachedLogger: pino.Logger | undefined

/**
 * Returns the shared pino logger, creating it on first use.
 *
 * NOTE: `logLevel` only takes effect on the very first call; later calls
 * return the cached instance regardless of the level they pass.
 *
 * @param logLevel - pino level name ('trace'..'fatal') used at creation time.
 */
export function getLogger(logLevel = 'info'): pino.Logger {
  if (!cachedLogger) {
    cachedLogger = pino({
      // Drop the default pid/hostname bindings from every log line.
      base: undefined,
      formatters: {
        // Emit the textual level label instead of pino's numeric level.
        level(label) {
          return { level: label }
        }
      },
      level: logLevel,
      timestamp: pino.stdTimeFunctions.isoTime
    })
  }
  return cachedLogger
}

/**
 * Derives a child logger whose `component` binding is the parent's component
 * path extended with the given segment(s), joined by '/'
 * (e.g. 'modules' + 'emails' -> 'modules/emails').
 *
 * @param otherChild - parent logger whose bindings are inherited.
 * @param subComponent - one or more path segments to append; falsy segments
 *   (including a missing parent component) are skipped.
 */
export function extendLoggerComponent(
  otherChild: pino.Logger,
  ...subComponent: string[]
): pino.Logger {
  // bindings() returns a fresh copy, so overriding `component` here does not
  // mutate the parent logger's own bindings.
  const bindings = otherChild.bindings()
  const componentPath = [bindings.component, ...subComponent]
    .filter(Boolean)
    .join('/')
  return otherChild.child({ ...bindings, component: componentPath })
}

Просмотреть файл

@ -1,3 +1,24 @@
FROM node:16.18.0-bullseye-slim as build-stage
ARG NODE_ENV=production
ENV NODE_ENV=${NODE_ENV}
WORKDIR /speckle-server
COPY .yarnrc.yml .
COPY .yarn ./.yarn
COPY package.json yarn.lock ./
COPY packages/webhook-service/package.json ./packages/webhook-service/
COPY packages/shared/package.json ./packages/shared/
RUN yarn workspaces focus -A && yarn
COPY packages/shared ./packages/shared/
COPY packages/webhook-service/src ./packages/webhook-service/
RUN yarn workspaces foreach run build
FROM node:16.18.0-bullseye-slim as node
RUN apt-get update && \
@ -19,10 +40,13 @@ COPY .yarnrc.yml .
COPY .yarn ./.yarn
COPY package.json yarn.lock ./
WORKDIR /speckle-server/packages/webhook-service
COPY packages/webhook-service/package.json .
COPY packages/webhook-service/package.json ./packages/webhook-service/
COPY --from=build-stage /speckle-server/packages/shared ./packages/shared
COPY --from=build-stage /speckle-server/packages/webhook-service ./packages/webhook-service
WORKDIR /speckle-server/packages/webhook-service/
RUN yarn workspaces focus --production
COPY packages/webhook-service/src .
ENTRYPOINT [ "tini", "--" ]
CMD ["yarn", "node", "main.js"]

Просмотреть файл

@ -21,6 +21,7 @@
"lint": "eslint . --ext .js,.ts"
},
"dependencies": {
"@speckle/shared": "workspace:^",
"knex": "^2.0.0",
"node-fetch": "^2.6.1",
"pg": "^8.7.3",

Просмотреть файл

@ -3,7 +3,8 @@
const crypto = require('crypto')
const knex = require('./knex')
const fs = require('fs')
const metrics = require('./prometheusMetrics')
const metrics = require('./observability/prometheusMetrics')
const { logger } = require('./observability/logging')
let shouldExit = false
const HEALTHCHECK_FILE_PATH = '/tmp/last_successful_query'
@ -57,7 +58,7 @@ async function doTask(task) {
.digest('hex')
const postHeaders = { 'X-WEBHOOK-SIGNATURE': signature }
console.log(
logger.info(
`Callin webhook ${fullPayload.streamId} : ${fullPayload.event.event_name} at ${fullPayload.webhook.url}...`
)
const result = await makeNetworkRequest({
@ -66,7 +67,7 @@ async function doTask(task) {
headersData: postHeaders
})
console.log(` Result: ${JSON.stringify(result)}`)
logger.info(` Result: ${JSON.stringify(result)}`)
if (!result.success) {
throw new Error(result.error)
@ -124,17 +125,17 @@ async function tick() {
setTimeout(tick, 10)
} catch (err) {
metrics.metricOperationErrors.labels('main_loop').inc()
console.log('Error executing task: ', err)
logger.error('Error executing task: ', err)
setTimeout(tick, 5000)
}
}
async function main() {
console.log('Starting Webhook Service...')
logger.info('Starting Webhook Service...')
process.on('SIGTERM', () => {
shouldExit = true
console.log('Shutting down...')
logger.info('Shutting down...')
})
metrics.initPrometheusMetrics()

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше