AlexandruPopovici 2023-02-28 15:20:37 +02:00
Parents: b231204d24 7e89950358
Commit 38d81c2d06
28 changed files with 246 additions and 106 deletions

View file

@ -50,7 +50,7 @@ async function startTask() {
}
async function doTask(task) {
const taskLogger = logger.child({ task })
let taskLogger = logger.child({ taskId: task.id })
let tempUserToken = null
let serverApi = null
let fileTypeForMetric = 'unknown'
@ -65,7 +65,15 @@ async function doTask(task) {
}
fileTypeForMetric = info.fileType || 'missing_info'
fileSizeForMetric = Number(info.fileSize) || 0
taskLogger = taskLogger.child({
fileId: info.id,
fileType: fileTypeForMetric,
fileName: info.fileName,
fileSize: fileSizeForMetric,
userId: info.userId,
streamId: info.streamId,
branchName: info.branchName
})
fs.mkdirSync(TMP_INPUT_DIR, { recursive: true })
serverApi = new ServerAPI({ streamId: info.streamId })
@ -86,6 +94,7 @@ async function doTask(task) {
if (info.fileType === 'ifc') {
await runProcessWithTimeout(
taskLogger,
process.env['NODE_BINARY_PATH'] || 'node',
[
'--no-experimental-fetch',
@ -104,6 +113,7 @@ async function doTask(task) {
)
} else if (info.fileType === 'stl') {
await runProcessWithTimeout(
taskLogger,
process.env['PYTHON_BINARY_PATH'] || 'python3',
[
'./stl/import_file.py',
@ -127,6 +137,7 @@ async function doTask(task) {
})
await runProcessWithTimeout(
taskLogger,
process.env['PYTHON_BINARY_PATH'] || 'python3',
[
'-u',
@ -190,9 +201,9 @@ async function doTask(task) {
}
}
function runProcessWithTimeout(cmd, cmdArgs, extraEnv, timeoutMs) {
function runProcessWithTimeout(processLogger, cmd, cmdArgs, extraEnv, timeoutMs) {
return new Promise((resolve, reject) => {
let boundLogger = logger.child({ cmd, args: cmdArgs })
let boundLogger = processLogger.child({ cmd, args: cmdArgs })
boundLogger.info('Starting process.')
const childProc = spawn(cmd, cmdArgs, { env: { ...process.env, ...extraEnv } })
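For reference, a minimal sketch of a call against the updated signature; the script path, extra environment and timeout below are illustrative assumptions, not values taken from this commit:

// Hypothetical call site: the bound taskLogger travels with the spawned process,
// so its log lines carry the taskId/fileId context attached above.
await runProcessWithTimeout(
  taskLogger,                                      // logger bound via logger.child({ taskId, ... })
  process.env['PYTHON_BINARY_PATH'] || 'python3',  // command to spawn
  ['./obj/import_file.py', TMP_INPUT_DIR],         // illustrative arguments
  { USER_TOKEN: tempUserToken },                   // extra env merged over process.env
  10 * 60 * 1000                                   // illustrative timeout in ms
)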

View file

@ -11,7 +11,10 @@ import { createTerminus } from '@godaddy/terminus'
import * as Sentry from '@sentry/node'
import Logging from '@/logging'
import { startupLogger, shutdownLogger } from '@/logging/logging'
import { LoggingExpressMiddleware } from '@/logging/expressLogging'
import {
DetermineRequestIdMiddleware,
LoggingExpressMiddleware
} from '@/logging/expressLogging'
import { errorLoggingMiddleware } from '@/logging/errorLogging'
import prometheusClient from 'prom-client'
@ -183,9 +186,8 @@ export async function init() {
// Should perhaps be done manually?
await knex.migrate.latest()
if (process.env.NODE_ENV !== 'test') {
app.use(DetermineRequestIdMiddleware)
app.use(LoggingExpressMiddleware)
}
if (process.env.COMPRESSION) {
app.use(compression())

View file

@ -1,7 +1,86 @@
import { logger } from './logging'
import { randomUUID } from 'crypto'
import HttpLogger from 'pino-http'
import { IncomingMessage } from 'http'
import { NextFunction, Response } from 'express'
import pino, { SerializedResponse } from 'pino'
import { GenReqId } from 'pino-http'
const REQUEST_ID_HEADER = 'x-request-id'
const GenerateRequestId: GenReqId = (req: IncomingMessage) => DetermineRequestId(req)
const DetermineRequestId = (
req: IncomingMessage,
uuidGenerator: () => string = randomUUID
): string => {
const headers = req.headers[REQUEST_ID_HEADER]
if (!Array.isArray(headers)) return headers || uuidGenerator()
return headers[0] || uuidGenerator()
}
export const LoggingExpressMiddleware = HttpLogger({
logger,
autoLogging: false
autoLogging: true,
genReqId: GenerateRequestId,
customLogLevel: (req, res, err) => {
if (res.statusCode >= 400 && res.statusCode < 500) {
return 'info'
} else if (res.statusCode >= 500 || err) {
return 'error'
} else if (res.statusCode >= 300 && res.statusCode < 400) {
return 'silent'
}
return 'info'
},
// We need to redact any potentially sensitive data from the logs.
// As we do not know which headers a user or client may send in a request,
// we have to allowlist selected headers.
serializers: {
req: pino.stdSerializers.wrapRequestSerializer((req) => {
return {
id: req.raw.id,
method: req.raw.method,
path: req.raw.url?.split('?')[0], // Remove query params which might be sensitive
// Allowlist useful headers
headers: {
host: req.raw.headers.host,
'user-agent': req.raw.headers['user-agent'],
'x-request-id': req.raw.headers[REQUEST_ID_HEADER],
referer: req.raw.headers.referer
}
}
}),
res: pino.stdSerializers.wrapResponseSerializer((res) => {
const resRaw = res as SerializedResponse & {
raw: {
headers: Record<string, string>
}
}
return {
statusCode: res.raw.statusCode,
// Allowlist useful headers
headers: {
'content-length': resRaw.raw.headers['content-length'],
'content-type': resRaw.raw.headers['content-type'],
'retry-after': resRaw.raw.headers['retry-after'],
'x-ratelimit-remaining': resRaw.raw.headers['x-ratelimit-remaining'],
'x-ratelimit-reset': resRaw.raw.headers['x-ratelimit-reset'],
'x-request-id': resRaw.raw.headers['x-request-id'],
'x-speckle-meditation': resRaw.raw.headers['x-speckle-meditation']
}
}
})
}
})
export const DetermineRequestIdMiddleware = (
req: IncomingMessage,
res: Response,
next: NextFunction
) => {
const id = DetermineRequestId(req)
res.setHeader(REQUEST_ID_HEADER, id)
next()
}
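A minimal sketch, assuming DetermineRequestId were exported for a unit test, of the fallback behaviour it implements; the fakeReq factory is an assumption:

// Hypothetical usage: a client-supplied x-request-id is reused as-is, a missing
// header falls back to the injected generator (randomUUID by default), and a
// repeated header keeps only its first value.
const fakeReq = (headers: Record<string, string | string[]>) =>
  ({ headers } as unknown as IncomingMessage)

DetermineRequestId(fakeReq({ 'x-request-id': 'abc-123' }))           // -> 'abc-123'
DetermineRequestId(fakeReq({}), () => 'generated-id')                // -> 'generated-id'
DetermineRequestId(fakeReq({ 'x-request-id': ['first', 'second'] })) // -> 'first'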

View file

@ -23,5 +23,4 @@ export const uploadEndpointLogger = extendLoggerComponent(logger, 'upload-endpoi
export const dbLogger = extendLoggerComponent(logger, 'db')
export const servicesLogger = extendLoggerComponent(logger, 'services')
export const rateLimiterLogger = extendLoggerComponent(logger, 'rate-limiter')
export const authLogger = extendLoggerComponent(logger, 'auth')
export const redisLogger = extendLoggerComponent(logger, 'redis')

View file

@ -14,7 +14,6 @@ const { revokeRefreshToken } = require(`@/modules/auth/services/apps`)
const { validateScopes } = require(`@/modules/shared`)
const { InvalidAccessCodeRequestError } = require('@/modules/auth/errors')
const { ForbiddenError } = require('apollo-server-errors')
const { moduleLogger } = require('@/logging/logging')
// TODO: Secure these endpoints!
module.exports = (app) => {
@ -44,15 +43,15 @@ module.exports = (app) => {
const ac = await createAuthorizationCode({ appId, userId, challenge })
return res.redirect(`${app.redirectUrl}?access_code=${ac}`)
} catch (err) {
sentry({ err })
moduleLogger.error(err)
if (
err instanceof InvalidAccessCodeRequestError ||
err instanceof ForbiddenError
) {
res.log.info({ err }, 'Invalid access code request error, or Forbidden error.')
return res.status(400).send(err.message)
} else {
sentry({ err })
res.log.error(err)
return res
.status(500)
.send('Something went wrong while processing your request')
@ -99,7 +98,7 @@ module.exports = (app) => {
return res.send(authResponse)
} catch (err) {
sentry({ err })
moduleLogger.warn(err)
res.log.info({ err }, 'Error while trying to generate a new token.')
return res.status(401).send({ err: err.message })
}
})
@ -120,7 +119,7 @@ module.exports = (app) => {
return res.status(200).send({ message: 'You have logged out.' })
} catch (err) {
sentry({ err })
moduleLogger.error(err)
res.log.info({ err }, 'Error while trying to logout.')
return res.status(400).send('Something went wrong while trying to logout.')
}
})

View file

@ -12,7 +12,6 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { passportAuthenticate } = require('@/modules/auth/services/passportService')
const { logger } = require('@/logging/logging')
const { UserInputError } = require('@/modules/core/errors/userinput')
module.exports = async (app, session, sessionStorage, finalizeAuth) => {
@ -111,6 +110,7 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => {
// ID is used later for verifying access token
req.user.id = myUser.id
req.log = req.log.child({ userId: myUser.id })
// use the invite
await finalizeInvitedServerRegistration(user.email, myUser.id)
@ -123,10 +123,13 @@ module.exports = async (app, session, sessionStorage, finalizeAuth) => {
} catch (err) {
switch (err.constructor) {
case UserInputError:
logger.info(err)
req.log.info(
{ err },
'User input error during Azure AD authentication callback.'
)
break
default:
logger.error(err)
req.log.error(err, 'Error during Azure AD authentication callback.')
}
return next()
}

View file

@ -17,7 +17,6 @@ const {
resolveAuthRedirectPath
} = require('@/modules/serverinvites/services/inviteProcessingService')
const { getIpFromRequest } = require('@/modules/shared/utils/ip')
const { logger } = require('@/logging/logging')
const { NoInviteFoundError } = require('@/modules/serverinvites/errors')
const {
UserInputError,
@ -44,15 +43,16 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => {
password: req.body.password
})
if (!valid) throw new UserInputError('Invalid credentials')
if (!valid) throw new UserInputError('Invalid credentials.')
const user = await getUserByEmail({ email: req.body.email })
if (!user) throw new UserInputError('Invalid credentials')
if (!user) throw new UserInputError('Invalid credentials.')
req.user = { id: user.id }
return next()
} catch (err) {
return res.status(401).send({ err: true, message: 'Invalid credentials' })
res.log.info({ err }, 'Error while logging in.')
return res.status(401).send({ err: true, message: 'Invalid credentials.' })
}
},
finalizeAuth
@ -99,6 +99,7 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => {
// so we go ahead and register the user
const userId = await createUser(user)
req.user = { id: userId, email: user.email }
req.log = req.log.child({ userId })
// 4. use up all server-only invites the email had attached to it
await finalizeInvitedServerRegistration(user.email, userId)
@ -112,10 +113,10 @@ module.exports = async (app, session, sessionAppId, finalizeAuth) => {
case PasswordTooShortError:
case UserInputError:
case NoInviteFoundError:
logger.info(err)
res.log.info({ err }, 'Error while registering.')
return res.status(400).send({ err: err.message })
default:
logger.error(err)
res.log.error(err, 'Error while registering.')
return res.status(500).send({ err: err.message })
}
}

View file

@ -89,10 +89,12 @@ exports.init = async (app) => {
limits: { fileSize: getFileSizeLimit() }
})
const streamId = req.params.streamId
req.log = req.log.child({ streamId, userId: req.context.userId })
busboy.on('file', (formKey, file, info) => {
const { filename: fileName } = info
const fileType = fileName.split('.').pop().toLowerCase()
req.log = req.log.child({ fileName, fileType })
const registerUploadResult = (processingPromise) => {
finalizePromises.push(
processingPromise.then((resultItem) => ({ ...resultItem, formKey }))
@ -109,6 +111,8 @@ exports.init = async (app) => {
}
}
req.log = req.log.child({ blobId })
uploadOperations[blobId] = uploadFileStream(
storeFileStream,
{ streamId, userId: req.context.userId },
@ -148,7 +152,7 @@ exports.init = async (app) => {
})
busboy.on('error', async (err) => {
logger.error(err, 'File upload error')
res.log.info({ err }, 'Upload request error.')
//delete all started uploads
await Promise.all(
Object.keys(uploadOperations).map((blobId) =>
@ -157,8 +161,7 @@ exports.init = async (app) => {
)
const status = 400
const response = 'Upload request error. The server logs have more details'
const response = 'Upload request error. The server logs may have more details.'
res.status(status).end(response)
})

View file

@ -7,13 +7,12 @@ const { SpeckleObjectsStream } = require('./speckleObjectsStream')
const { getObjectsStream } = require('../services/objects')
const { pipeline, PassThrough } = require('stream')
const { logger } = require('@/logging/logging')
module.exports = (app) => {
app.options('/api/getobjects/:streamId', cors())
app.post('/api/getobjects/:streamId', cors(), async (req, res) => {
const boundLogger = logger.child({
req.log = req.log.child({
userId: req.context.userId || '-',
streamId: req.params.streamId
})
@ -44,9 +43,9 @@ module.exports = (app) => {
res,
(err) => {
if (err) {
boundLogger.error(err, `App error streaming objects`)
req.log.error(err, `App error streaming objects`)
} else {
boundLogger.info(
req.log.info(
`Streamed ${childrenList.length} objects (size: ${
gzipStream.bytesWritten / 1000000
} MB)`
@ -71,7 +70,7 @@ module.exports = (app) => {
})
}
} catch (ex) {
boundLogger.error(ex, `DB Error streaming objects`)
req.log.error(ex, `DB Error streaming objects`)
speckleObjStream.emit('error', new Error('Database streaming error'))
}
speckleObjStream.end()

View file

@ -5,13 +5,12 @@ const cors = require('cors')
const { validatePermissionsWriteStream } = require('./authUtils')
const { hasObjects } = require('../services/objects')
const { logger } = require('@/logging/logging')
module.exports = (app) => {
app.options('/api/diff/:streamId', cors())
app.post('/api/diff/:streamId', cors(), async (req, res) => {
const boundLogger = logger.child({
req.log = req.log.child({
userId: req.context.userId || '-',
streamId: req.params.streamId
})
@ -25,13 +24,13 @@ module.exports = (app) => {
const objectList = JSON.parse(req.body.objects)
boundLogger.info(`Diffing ${objectList.length} objects.`)
req.log.info(`Diffing ${objectList.length} objects.`)
const response = await hasObjects({
streamId: req.params.streamId,
objectIds: objectList
})
boundLogger.debug(response)
req.log.debug(response)
res.writeHead(200, {
'Content-Encoding': 'gzip',
'Content-Type': 'application/json'

View file

@ -6,7 +6,6 @@ const Busboy = require('busboy')
const { validatePermissionsWriteStream } = require('./authUtils')
const { createObjectsBatched } = require('../services/objects')
const { uploadEndpointLogger } = require('@/logging/logging')
const MAX_FILE_SIZE = 50 * 1024 * 1024
@ -14,8 +13,8 @@ module.exports = (app) => {
app.options('/objects/:streamId', cors())
app.post('/objects/:streamId', cors(), async (req, res) => {
const boundLogger = uploadEndpointLogger.child({
user: req.context.userId || '-',
req.log = req.log.child({
userId: req.context.userId || '-',
streamId: req.params.streamId
})
@ -53,7 +52,7 @@ module.exports = (app) => {
const gzippedBuffer = Buffer.concat(buffer)
if (gzippedBuffer.length > MAX_FILE_SIZE) {
boundLogger.error(
req.log.error(
`Upload error: Batch size too large (${gzippedBuffer.length} > ${MAX_FILE_SIZE})`
)
if (!requestDropped)
@ -67,7 +66,7 @@ module.exports = (app) => {
const gunzippedBuffer = zlib.gunzipSync(gzippedBuffer).toString()
if (gunzippedBuffer.length > MAX_FILE_SIZE) {
boundLogger.error(
req.log.error(
`Upload error: Batch size too large (${gunzippedBuffer.length} > ${MAX_FILE_SIZE})`
)
if (!requestDropped)
@ -82,7 +81,7 @@ module.exports = (app) => {
try {
objs = JSON.parse(gunzippedBuffer)
} catch (e) {
boundLogger.error(`Upload error: Batch not in JSON format`)
req.log.error(`Upload error: Batch not in JSON format`)
if (!requestDropped) res.status(400).send('Failed to parse data.')
requestDropped = true
}
@ -97,7 +96,7 @@ module.exports = (app) => {
}
const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => {
boundLogger.error(e, `Upload error.`)
req.log.error(e, `Upload error.`)
if (!requestDropped)
res
.status(400)
@ -110,7 +109,7 @@ module.exports = (app) => {
await promise
boundLogger.info(
req.log.info(
{
durationSeconds: (Date.now() - t0) / 1000,
crtMemUsageMB: process.memoryUsage().heapUsed / 1024 / 1024,
@ -137,7 +136,7 @@ module.exports = (app) => {
let objs = []
if (buffer.length > MAX_FILE_SIZE) {
boundLogger.error(
req.log.error(
`Upload error: Batch size too large (${buffer.length} > ${MAX_FILE_SIZE})`
)
if (!requestDropped)
@ -150,7 +149,7 @@ module.exports = (app) => {
try {
objs = JSON.parse(buffer)
} catch (e) {
boundLogger.error(`Upload error: Batch not in JSON format`)
req.log.error(`Upload error: Batch not in JSON format`)
if (!requestDropped) res.status(400).send('Failed to parse data.')
requestDropped = true
}
@ -164,7 +163,7 @@ module.exports = (app) => {
}
const promise = createObjectsBatched(req.params.streamId, objs).catch((e) => {
boundLogger.error(e, `Upload error.`)
req.log.error(e, `Upload error.`)
if (!requestDropped)
res
.status(400)
@ -176,7 +175,7 @@ module.exports = (app) => {
promises.push(promise)
await promise
boundLogger.info(
req.log.info(
{
uploadedSizeMB: buffer.length / 1000000,
durationSeconds: (Date.now() - t0) / 1000,
@ -187,7 +186,7 @@ module.exports = (app) => {
)
})
} else {
boundLogger.error(`Invalid ContentType header: ${mimeType}`)
req.log.info(`Invalid ContentType header: ${mimeType}`)
if (!requestDropped)
res
.status(400)
@ -201,7 +200,7 @@ module.exports = (app) => {
busboy.on('finish', async () => {
if (requestDropped) return
boundLogger.info(
req.log.info(
{
crtMemUsageMB: process.memoryUsage().heapUsed / 1024 / 1024
},
@ -218,7 +217,7 @@ module.exports = (app) => {
})
busboy.on('error', async (err) => {
boundLogger.info(`Upload error: ${err}`)
req.log.info(`Upload error: ${err}`)
if (!requestDropped)
res.status(400).end('Upload request error. The server logs have more details')
requestDropped = true

View file

@ -1,4 +1,12 @@
/* istanbul ignore file */
const { mockRequireModule } = require('@/test/mockHelper')
const envHelperMock = mockRequireModule(
[
'@/modules/shared/helpers/envHelper',
require.resolve('../../shared/helpers/envHelper')
],
['@/modules/shared/index']
)
const expect = require('chai').expect
const { beforeEachContext } = require('@/test/hooks')
@ -12,7 +20,6 @@ const {
} = require('@/modules/shared')
const { buildContext } = require('@/modules/shared/middleware')
const { ForbiddenError } = require('apollo-server-express')
const { adminOverrideEnabled } = require('@/modules/shared/helpers/envHelper')
describe('Generic AuthN & AuthZ controller tests', () => {
before(async () => {
@ -133,9 +140,11 @@ describe('Generic AuthN & AuthZ controller tests', () => {
})
afterEach(() => {
while (adminOverrideEnabled()) {
process.env.ADMIN_OVERRIDE_ENABLED = 'false'
}
envHelperMock.disable()
})
after(() => {
envHelperMock.destroy()
envHelperMock.resetMockedFunctions()
})
it('should allow stream:owners to be stream:owners', async () => {
const role = await authorizeResolver(
@ -147,9 +156,8 @@ describe('Generic AuthN & AuthZ controller tests', () => {
})
it('should get the passed in role for server:admins if override enabled', async () => {
while (!adminOverrideEnabled()) {
process.env.ADMIN_OVERRIDE_ENABLED = 'true'
}
envHelperMock.enable()
envHelperMock.mockFunction('adminOverrideEnabled', () => true)
const role = await authorizeResolver(
serverOwner.id,
myStream.id,
@ -157,7 +165,6 @@ describe('Generic AuthN & AuthZ controller tests', () => {
)
expect(role).to.equal('stream:contributor')
})
it('should not allow server:admins to be anything if adminOverride is disabled', async () => {
try {
await authorizeResolver(serverOwner.id, notMyStream.id, 'stream:contributor')
@ -168,9 +175,8 @@ describe('Generic AuthN & AuthZ controller tests', () => {
})
it('should allow server:admins to be anything if adminOverride is enabled', async () => {
while (!adminOverrideEnabled()) {
process.env.ADMIN_OVERRIDE_ENABLED = 'true'
}
envHelperMock.enable()
envHelperMock.mockFunction('adminOverrideEnabled', () => true)
const role = await authorizeResolver(
serverOwner.id,
@ -190,9 +196,8 @@ describe('Generic AuthN & AuthZ controller tests', () => {
})
it('should not allow server:users to be anything if adminOverride is enabled', async () => {
while (!adminOverrideEnabled()) {
process.env.ADMIN_OVERRIDE_ENABLED = 'true'
}
envHelperMock.enable()
envHelperMock.mockFunction('adminOverrideEnabled', () => true)
try {
await authorizeResolver(otherGuy.id, myStream.id, 'stream:contributor')
throw 'This should have thrown'

View file

@ -13,7 +13,7 @@ module.exports = (app) => {
error instanceof EmailVerificationFinalizationError
? error.message
: 'Email verification unexpectedly failed'
req.log.info({ err: error }, 'Email verification failed.')
return res.redirect(`/?emailverifiederror=${msg}`)
}
})

View file

@ -21,7 +21,7 @@ export async function sendEmail({
}: SendEmailParams): Promise<boolean> {
const transporter = getTransporter()
if (!transporter) {
logger.error('No email transport present. Cannot send emails.')
logger.warn('No email transport present. Cannot send emails.')
return false
}
try {

View file

@ -5,7 +5,7 @@ const { saveUploadFile } = require('./services/fileuploads')
const request = require('request')
const { streamWritePermissions } = require('@/modules/shared/authz')
const { authMiddlewareCreator } = require('@/modules/shared/middleware')
const { moduleLogger, logger } = require('@/logging/logging')
const { moduleLogger } = require('@/logging/logging')
const saveFileUploads = async ({ userId, streamId, branchName, uploadResults }) => {
await Promise.all(
@ -35,12 +35,17 @@ exports.init = async (app) => {
'/api/file/:fileType/:streamId/:branchName?',
authMiddlewareCreator(streamWritePermissions),
async (req, res) => {
req.log = req.log.child({
streamId: req.params.streamId,
userId: req.context.userId,
branchName: req.params.branchName ?? 'main'
})
req.pipe(
request(
`${process.env.CANONICAL_URL}/api/stream/${req.params.streamId}/blob`,
async (err, response, body) => {
if (err) {
logger.error(err)
res.log.error(err, 'Error while uploading blob.')
res.status(500).send(err.message)
return
}

View file

@ -12,6 +12,7 @@ export default function (app: Express) {
return res.status(200).send('Password reset email sent.')
} catch (e: unknown) {
req.log.info({ err: e }, 'Error while requesting password recovery.')
res.status(400).send(ensureError(e).message)
}
})
@ -24,6 +25,7 @@ export default function (app: Express) {
return res.status(200).send('Password reset. Please log in.')
} catch (e: unknown) {
req.log.info({ err: e }, 'Error while finalizing password recovery.')
res.status(400).send(ensureError(e).message)
}
})

View file

@ -248,4 +248,6 @@ await sandbox.loadUrl(
// 'https://latest.speckle.dev/streams/f92e060177/commits/92858681b7'
// 'https://latest.speckle.dev/streams/f92e060177/commits/655771674e'
// 'https://latest.speckle.dev/streams/f92e060177/commits/00dbbf4509'
// Big curves
// 'https://latest.speckle.dev/streams/c1faab5c62/commits/49dad07ae2'
)

View file

@ -28,6 +28,7 @@ import {
IntersectionQueryResult
} from './modules/queries/Query'
import { Utils } from './modules/Utils'
import { ObjectLayers } from './modules/SpeckleRenderer'
export {
Viewer,
@ -52,6 +53,7 @@ export type {
SpeckleView,
CanonicalView,
InlineView,
ObjectLayers,
PointQuery,
IntersectionQuery,
QueryResult,

View file

@ -3,7 +3,6 @@ import {
Camera,
Intersection,
Object3D,
Points,
Ray,
Scene,
Vector2,
@ -13,10 +12,10 @@ import { LineMaterial } from 'three/examples/jsm/lines/LineMaterial'
import { LineSegments2 } from 'three/examples/jsm/lines/LineSegments2'
import { SpeckleRaycaster } from './objects/SpeckleRaycaster'
import Logger from 'js-logger'
import { ObjectLayers } from './SpeckleRenderer'
export class Intersections {
private raycaster: SpeckleRaycaster
private allowPointPick = false
private boxBuffer: Box3 = new Box3()
private vec0Buffer: Vector4 = new Vector4()
private vec1Buffer: Vector4 = new Vector4()
@ -81,11 +80,12 @@ export class Intersections {
point: Vector2,
nearest = true,
bounds: Box3 = null,
castLayers: Array<ObjectLayers> = undefined,
firstOnly = false
): Array<Intersection> {
this.raycaster.setFromCamera(point, camera)
this.raycaster.firstHitOnly = firstOnly
return this.intersectInternal(scene, nearest, bounds)
return this.intersectInternal(scene, nearest, bounds, castLayers)
}
public intersectRay(
@ -94,15 +94,29 @@ export class Intersections {
ray: Ray,
nearest = true,
bounds: Box3 = null,
castLayers: Array<ObjectLayers> = undefined,
firstOnly = false
): Array<Intersection> {
this.raycaster.camera = camera
this.raycaster.set(ray.origin, ray.direction)
this.raycaster.firstHitOnly = firstOnly
return this.intersectInternal(scene, nearest, bounds)
return this.intersectInternal(scene, nearest, bounds, castLayers)
}
private intersectInternal(scene: Scene, nearest: boolean, bounds: Box3) {
private intersectInternal(
scene: Scene,
nearest: boolean,
bounds: Box3,
castLayers: Array<ObjectLayers>
) {
const preserveMask = this.raycaster.layers.mask
if (castLayers !== undefined) {
this.raycaster.layers.disableAll()
castLayers.forEach((layer) => {
this.raycaster.layers.enable(layer)
})
}
const target = scene.getObjectByName('ContentGroup')
let results = []
@ -111,6 +125,7 @@ export class Intersections {
results = this.raycaster.intersectObjects(target.children)
Logger.warn('Intersect time -> ', performance.now() - start)
}
this.raycaster.layers.mask = preserveMask
if (results.length === 0) return null
if (nearest)
@ -122,11 +137,7 @@ export class Intersections {
return bounds.containsPoint(result.point)
})
}
if (!this.allowPointPick) {
results = results.filter((val) => {
return !(val.object instanceof Points)
})
}
return results
}
}
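A hedged sketch of a caller using the new castLayers parameter to limit a raycast to mesh content; the intersections and renderer instances are assumptions (the IntersectionQuerySolver change below uses the same pattern):

// Illustrative only: cast against STREAM_CONTENT_MESH and nothing else; the
// previous layer mask is restored inside intersectInternal after the cast.
const results = intersections.intersectRay(
  scene,
  camera,
  ray,
  true,                               // nearest first
  renderer.currentSectionBox,         // bounds (may be null)
  [ObjectLayers.STREAM_CONTENT_MESH]  // castLayers: only mesh content
)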

View file

@ -57,6 +57,10 @@ import { Shadowcatcher } from './Shadowcatcher'
import Logger from 'js-logger'
export enum ObjectLayers {
STREAM_CONTENT_MESH = 10,
STREAM_CONTENT_LINE = 11,
STREAM_CONTENT_POINT = 12,
STREAM_CONTENT = 1,
PROPS = 2,
SHADOWCATCHER = 3
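These are three.js layer indices (valid range 0–31): layers.set places an object on a single layer, while enable adds layers, so a camera or raycaster can test against several at once. A hedged sketch, with mesh and raycaster as assumed instances:

// Illustrative only: mirrors what MeshBatch and SpeckleRaycaster do elsewhere
// in this commit.
mesh.layers.set(ObjectLayers.STREAM_CONTENT_MESH)
raycaster.layers.disableAll()
raycaster.layers.enable(ObjectLayers.STREAM_CONTENT_MESH)
raycaster.layers.enable(ObjectLayers.STREAM_CONTENT_LINE)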
@ -296,7 +300,8 @@ export default class SpeckleRenderer {
})
this._shadowcatcher = new Shadowcatcher(ObjectLayers.SHADOWCATCHER, [
ObjectLayers.STREAM_CONTENT
ObjectLayers.STREAM_CONTENT_MESH,
ObjectLayers.STREAM_CONTENT_LINE
])
let restoreVisibility
this._shadowcatcher.shadowcatcherPass.onBeforeRender = () => {
@ -499,7 +504,6 @@ export default class SpeckleRenderer {
private addBatch(batch: Batch, parent: Object3D) {
const batchRenderable = batch.renderObject
batchRenderable.layers.set(ObjectLayers.STREAM_CONTENT)
parent.add(batch.renderObject)
if (batch.geometryType === GeometryType.MESH) {

View file

@ -171,10 +171,10 @@ export default class Batcher {
const visibilityRanges = {}
for (const k in this.batches) {
const batch: Batch = this.batches[k]
if (batch.geometryType !== GeometryType.MESH) {
visibilityRanges[k] = HideAllBatchUpdateRange
continue
}
// if (batch.geometryType !== GeometryType.MESH) {
// visibilityRanges[k] = HideAllBatchUpdateRange
// continue
// }
const batchMesh: Mesh = batch.renderObject as Mesh
if (batchMesh.geometry.groups.length === 0) {
if ((batchMesh.material as Material).transparent === true)
@ -204,10 +204,10 @@ export default class Batcher {
const visibilityRanges = {}
for (const k in this.batches) {
const batch: Batch = this.batches[k]
if (batch.geometryType !== GeometryType.MESH) {
visibilityRanges[k] = HideAllBatchUpdateRange
continue
}
// if (batch.geometryType !== GeometryType.MESH) {
// visibilityRanges[k] = HideAllBatchUpdateRange
// continue
// }
const batchMesh: Mesh = batch.renderObject as Mesh
if (batchMesh.geometry.groups.length === 0) {
if ((batchMesh.material as Material).stencilWrite === true)
@ -231,10 +231,10 @@ export default class Batcher {
const visibilityRanges = {}
for (const k in this.batches) {
const batch: Batch = this.batches[k]
if (batch.geometryType !== GeometryType.MESH) {
visibilityRanges[k] = HideAllBatchUpdateRange
continue
}
// if (batch.geometryType !== GeometryType.MESH) {
// visibilityRanges[k] = HideAllBatchUpdateRange
// continue
// }
const batchMesh: Mesh = batch.renderObject as Mesh
if (batchMesh.geometry.groups.length === 0) {
if ((batchMesh.material as Material).transparent === false)
@ -325,10 +325,8 @@ export default class Batcher {
})
}
/** Convenience method. This should also work as a filtering action
* Though, because the batches are not smart enough yet to group
* their draw ranges, it would currently be inefficient to isolate
* via filtering. This will change in the future
/**
* Used for debugging only
*/
public isolateRenderView(id: string) {
const rvs = WorldTree.getRenderTree().getRenderViewsForNodeId(id)
@ -375,6 +373,9 @@ export default class Batcher {
}
}
/**
* Used for debugging only
*/
public async isolateRenderViewBatch(id: string) {
const rv = WorldTree.getRenderTree().getRenderViewForNodeId(id)
for (const k in this.batches) {

View file

@ -13,6 +13,7 @@ import { LineSegments2 } from 'three/examples/jsm/lines/LineSegments2'
import { LineSegmentsGeometry } from 'three/examples/jsm/lines/LineSegmentsGeometry'
import { Geometry } from '../converter/Geometry'
import SpeckleLineMaterial from '../materials/SpeckleLineMaterial'
import { ObjectLayers } from '../SpeckleRenderer'
import { NodeRenderView } from '../tree/NodeRenderView'
import { Viewer } from '../Viewer'
import {
@ -198,6 +199,7 @@ export default class LineBatch implements Batch {
this.mesh.scale.set(1, 1, 1)
this.mesh.uuid = this.id
this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_LINE)
}
public getRenderView(index: number): NodeRenderView {

View file

@ -24,6 +24,7 @@ import {
HideAllBatchUpdateRange
} from './Batch'
import Logger from 'js-logger'
import { ObjectLayers } from '../SpeckleRenderer'
export default class MeshBatch implements Batch {
public id: string
@ -451,6 +452,7 @@ export default class MeshBatch implements Batch {
this.boundsTree.getBoundingBox(this.bounds)
this.mesh = new SpeckleMesh(this.geometry, this.batchMaterial, this.boundsTree)
this.mesh.uuid = this.id
this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_MESH)
}
public getRenderView(index: number): NodeRenderView {

View file

@ -17,6 +17,7 @@ import {
HideAllBatchUpdateRange
} from './Batch'
import Logger from 'js-logger'
import { ObjectLayers } from '../SpeckleRenderer'
export default class PointBatch implements Batch {
public id: string
@ -285,6 +286,7 @@ export default class PointBatch implements Batch {
this.makePointGeometry(position, color)
this.mesh = new Points(this.geometry, this.batchMaterial)
this.mesh.uuid = this.id
this.mesh.layers.set(ObjectLayers.STREAM_CONTENT_POINT)
}
public getRenderView(index: number): NodeRenderView {

View file

@ -6,7 +6,12 @@ export class SpeckleRaycaster extends Raycaster {
constructor(origin?, direction?, near = 0, far = Infinity) {
super(origin, direction, near, far)
this.layers.set(ObjectLayers.STREAM_CONTENT)
this.layers.disableAll()
this.layers.enable(ObjectLayers.STREAM_CONTENT)
this.layers.enable(ObjectLayers.STREAM_CONTENT_MESH)
this.layers.enable(ObjectLayers.STREAM_CONTENT_LINE)
// OFF by default
// this.layers.enable(ObjectLayers.STREAM_CONTENT_POINT)
}
public intersectObjects(objects, recursive = true, intersects = []) {

View file

@ -237,15 +237,18 @@ export class Pipeline {
this.copyOutputPass = new CopyOutputPass()
this.copyOutputPass.renderToScreen = true
this.depthPass.setLayers([ObjectLayers.STREAM_CONTENT])
this.normalsPass.setLayers([ObjectLayers.STREAM_CONTENT])
this.stencilPass.setLayers([ObjectLayers.STREAM_CONTENT])
this.depthPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH])
this.normalsPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH])
this.stencilPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH])
this.renderPass.setLayers([
ObjectLayers.PROPS,
ObjectLayers.STREAM_CONTENT,
ObjectLayers.STREAM_CONTENT_MESH,
ObjectLayers.STREAM_CONTENT_LINE,
ObjectLayers.STREAM_CONTENT_POINT,
ObjectLayers.SHADOWCATCHER
])
this.stencilMaskPass.setLayers([ObjectLayers.STREAM_CONTENT])
this.stencilMaskPass.setLayers([ObjectLayers.STREAM_CONTENT_MESH])
let restoreVisibility
this.depthPass.onBeforeRender = () => {

View file

@ -1,6 +1,6 @@
import Logger from 'js-logger'
import { Intersection, Ray, Vector2, Vector3 } from 'three'
import SpeckleRenderer from '../SpeckleRenderer'
import SpeckleRenderer, { ObjectLayers } from '../SpeckleRenderer'
import { IntersectionQuery, IntersectionQueryResult } from './Query'
export class IntersectionQuerySolver {
@ -32,7 +32,8 @@ export class IntersectionQuerySolver {
this.renderer.camera,
ray,
true,
this.renderer.currentSectionBox
this.renderer.currentSectionBox,
[ObjectLayers.STREAM_CONTENT_MESH]
)
if (!results || results.length === 0) return { objects: null }
const hits = this.renderer.queryHits(results)

View file

@ -64,7 +64,6 @@ async function makeNetworkRequest({ url, data, headersData, logger }) {
responseBody: response.body
}
} catch (e) {
logger.error(e, 'error when making network request for webhook.')
return {
success: false,
error: e.toString(),