Merge branch 'main' into fabians/quick-js-to-ts-4

This commit is contained in:
Kristaps Fabians Geikins 2024-10-25 15:19:29 +03:00
Родитель 6cf31becad c579e208ee
Коммит 9783537ea9
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: 16D20A16730A0111
6 изменённых файлов: 148 добавлений и 112 удалений

Просмотреть файл

@ -59,8 +59,10 @@ enum WorkspacePlans {
# Billing/lifecycle status of a workspace plan.
enum WorkspacePlanStatuses {
# Plan is active and in good standing
valid
# A payment attempt failed — presumably still active pending retry; confirm against billing logic
paymentFailed
# Cancelation has been requested but not yet taken effect (name suggests end-of-period; verify)
cancelationScheduled
# Plan has been canceled
canceled
# Workspace is on a trial
trial
# Trial or plan period has run out
expired
}
type WorkspacePlan {

Просмотреть файл

@ -1,6 +1,5 @@
import { ActionTypes, ResourceTypes } from '@/modules/activitystream/helpers/types'
import {
pubsub,
BranchSubscriptions as BranchPubsubEvents,
PublishSubscription
} from '@/modules/shared/utils/subscriptions'
@ -38,8 +37,7 @@ export const addBranchCreatedActivityFactory =
info: { branch },
message: `Branch created: ${branch.name} (${branch.id})`
}),
// @deprecated
pubsub.publish(BranchPubsubEvents.BranchCreated, {
publish(BranchPubsubEvents.BranchCreated, {
branchCreated: { ...branch },
streamId: branch.streamId
}),
@ -76,8 +74,7 @@ export const addBranchUpdatedActivityFactory =
info: { old: oldBranch, new: update },
message: `Branch metadata changed for branch ${update.id}`
}),
// @deprecated
pubsub.publish(BranchPubsubEvents.BranchUpdated, {
publish(BranchPubsubEvents.BranchUpdated, {
branchUpdated: { ...update },
streamId,
branchId: update.id
@ -115,7 +112,7 @@ export const addBranchDeletedActivityFactory =
info: { branch: { ...input, name: branchName } },
message: `Branch deleted: '${branchName}' (${input.id})`
}),
pubsub.publish(BranchPubsubEvents.BranchDeleted, {
publish(BranchPubsubEvents.BranchDeleted, {
branchDeleted: input,
streamId
}),

Просмотреть файл

@ -1,23 +1,34 @@
const { NotFoundError, EnvironmentResourceError } = require('@/modules/shared/errors')
const {
/* eslint-disable @typescript-eslint/no-explicit-any */
import {
NotFoundError,
EnvironmentResourceError,
BadRequestError
} from '@/modules/shared/errors'
import {
S3Client,
GetObjectCommand,
HeadBucketCommand,
DeleteObjectCommand,
CreateBucketCommand,
S3ServiceException
} = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')
const {
S3ServiceException,
S3ClientConfig,
ServiceOutputTypes
} from '@aws-sdk/client-s3'
import { Upload, Options as UploadOptions } from '@aws-sdk/lib-storage'
import {
getS3AccessKey,
getS3SecretKey,
getS3Endpoint,
getS3Region,
getS3BucketName,
createS3Bucket
} = require('@/modules/shared/helpers/envHelper')
} from '@/modules/shared/helpers/envHelper'
import { ensureError, Nullable } from '@speckle/shared'
import { get } from 'lodash'
import type { Command } from '@aws-sdk/smithy-client'
import type stream from 'stream'
let s3Config = null
let s3Config: Nullable<S3ClientConfig> = null
const getS3Config = () => {
if (!s3Config) {
@ -36,7 +47,7 @@ const getS3Config = () => {
return s3Config
}
let storageBucket = null
let storageBucket: Nullable<string> = null
const getStorageBucket = () => {
if (!storageBucket) {
@ -51,32 +62,43 @@ const getObjectStorage = () => ({
createBucket: createS3Bucket()
})
const sendCommand = async (command) => {
const sendCommand = async <CommandOutput extends ServiceOutputTypes>(
command: (Bucket: string) => Command<any, CommandOutput, any, any, any>
) => {
const { client, Bucket } = getObjectStorage()
try {
return await client.send(command(Bucket))
const ret = await client.send(command(Bucket))
return ret
} catch (err) {
if (err instanceof S3ServiceException && err.Code === 'NoSuchKey')
if (err instanceof S3ServiceException && get(err, 'Code') === 'NoSuchKey')
throw new NotFoundError(err.message)
throw err
}
}
const getObjectStream = async ({ objectKey }) => {
export const getObjectStream = async ({ objectKey }: { objectKey: string }) => {
const data = await sendCommand(
(Bucket) => new GetObjectCommand({ Bucket, Key: objectKey })
)
return data.Body
// TODO: Apparently not always stream.Readable according to types, but in practice this works
return data.Body as stream.Readable
}
const getObjectAttributes = async ({ objectKey }) => {
export const getObjectAttributes = async ({ objectKey }: { objectKey: string }) => {
const data = await sendCommand(
(Bucket) => new GetObjectCommand({ Bucket, Key: objectKey })
)
return { fileSize: data.ContentLength }
return { fileSize: data.ContentLength || 0 }
}
const storeFileStream = async ({ objectKey, fileStream }) => {
export const storeFileStream = async ({
objectKey,
fileStream
}: {
objectKey: string
fileStream: UploadOptions['params']['Body']
}) => {
const { client, Bucket } = getObjectStorage()
const parallelUploads3 = new Upload({
client,
@ -95,20 +117,40 @@ const storeFileStream = async ({ objectKey, fileStream }) => {
const data = await parallelUploads3.done()
// the ETag is a hash of the object. Could be used to dedupe stuff...
if (!data || !('ETag' in data) || !data.ETag) {
throw new BadRequestError('No ETag in response')
}
const fileHash = data.ETag.replaceAll('"', '')
return { fileHash }
}
const deleteObject = async ({ objectKey }) => {
export const deleteObject = async ({ objectKey }: { objectKey: string }) => {
await sendCommand((Bucket) => new DeleteObjectCommand({ Bucket, Key: objectKey }))
}
const ensureStorageAccess = async () => {
// No idea what the actual error type is, too difficult to figure out
type EnsureStorageAccessError = Error & {
statusCode?: number
$metadata?: { httpStatusCode?: number }
}
const isExpectedEnsureStorageAccessError = (
err: unknown
): err is EnsureStorageAccessError =>
err instanceof Error && ('statusCode' in err || '$metadata' in err)
export const ensureStorageAccess = async () => {
const { client, Bucket, createBucket } = getObjectStorage()
try {
await client.send(new HeadBucketCommand({ Bucket }))
return
} catch (err) {
if (err.statusCode === 403 || err['$metadata']?.httpStatusCode === 403) {
if (
isExpectedEnsureStorageAccessError(err) &&
(err.statusCode === 403 || err['$metadata']?.httpStatusCode === 403)
) {
throw new EnvironmentResourceError("Access denied to S3 bucket '{bucket}'", {
cause: err,
info: { bucket: Bucket }
@ -121,7 +163,7 @@ const ensureStorageAccess = async () => {
throw new EnvironmentResourceError(
"Can't open S3 bucket '{bucket}', and have failed to create it.",
{
cause: err,
cause: ensureError(err),
info: { bucket: Bucket }
}
)
@ -130,18 +172,10 @@ const ensureStorageAccess = async () => {
throw new EnvironmentResourceError(
"Can't open S3 bucket '{bucket}', and the Speckle server configuration has disabled creation of the bucket.",
{
cause: err,
cause: ensureError(err),
info: { bucket: Bucket }
}
)
}
}
}
module.exports = {
ensureStorageAccess,
deleteObject,
getObjectAttributes,
storeFileStream,
getObjectStream
}

Просмотреть файл

@ -1,74 +0,0 @@
const { withFilter } = require('graphql-subscriptions')
const {
pubsub,
BranchSubscriptions: BranchPubsubEvents
} = require('@/modules/shared/utils/subscriptions')
const { authorizeResolver } = require('@/modules/shared')
const { Roles } = require('@speckle/shared')
/**
* TODO: Clean up and move to branchesNew.ts
*/
// subscription events
const BRANCH_CREATED = BranchPubsubEvents.BranchCreated
const BRANCH_UPDATED = BranchPubsubEvents.BranchUpdated
const BRANCH_DELETED = BranchPubsubEvents.BranchDeleted
/** @type {import('@/modules/core/graph/generated/graphql').Resolvers} */
/**
 * Legacy GraphQL subscription resolvers for branch events
 * (created/updated/deleted), slated for migration to branchesNew.ts
 * per the TODO above.
 *
 * Every subscription first authorizes the subscriber as at least a
 * stream Reviewer on the event's stream, then filters events down to
 * the stream (and, for updates, optionally the branch) the client
 * subscribed to.
 */
module.exports = {
Subscription: {
branchCreated: {
subscribe: withFilter(
() => pubsub.asyncIterator([BRANCH_CREATED]),
// Deliver only events belonging to the subscribed stream;
// authorizeResolver throws (rejecting the event) if the user
// lacks Reviewer access under their resource access rules.
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
return payload.streamId === variables.streamId
}
)
},
branchUpdated: {
subscribe: withFilter(
() => pubsub.asyncIterator([BRANCH_UPDATED]),
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
// Stream must match; if the client also asked for a specific
// branch, the event's branch must match as well.
const streamMatch = payload.streamId === variables.streamId
if (streamMatch && variables.branchId) {
return payload.branchId === variables.branchId
}
return streamMatch
}
)
},
branchDeleted: {
subscribe: withFilter(
() => pubsub.asyncIterator([BRANCH_DELETED]),
// Same stream-scoped filtering as branchCreated
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
return payload.streamId === variables.streamId
}
)
}
}
}

Просмотреть файл

@ -1,4 +1,4 @@
import { authorizeResolver } from '@/modules/shared'
import { authorizeResolver, BranchPubsubEvents } from '@/modules/shared'
import {
createBranchAndNotifyFactory,
updateBranchAndNotifyFactory,
@ -30,7 +30,7 @@ import { legacyGetUserFactory } from '@/modules/core/repositories/users'
import { Resolvers } from '@/modules/core/graph/generated/graphql'
import { getPaginatedStreamBranchesFactory } from '@/modules/core/services/branch/retrieval'
import { saveActivityFactory } from '@/modules/activitystream/repositories'
import { publish } from '@/modules/shared/utils/subscriptions'
import { filteredSubscribe, publish } from '@/modules/shared/utils/subscriptions'
const markBranchStreamUpdated = markBranchStreamUpdatedFactory({ db })
const getStream = getStreamFactory({ db })
@ -137,5 +137,57 @@ export = {
const deleted = await deleteBranchAndNotify(args.branch, context.userId!)
return deleted
}
},
// Branch subscription resolvers, ported here from the legacy
// graphql-subscriptions based module. filteredSubscribe wires the
// pubsub event to a per-subscriber filter; each filter authorizes the
// user as at least a stream Reviewer before matching events to the
// subscribed stream (and branch, where requested).
Subscription: {
branchCreated: {
subscribe: filteredSubscribe(
BranchPubsubEvents.BranchCreated,
// Only forward events for the stream the client subscribed to;
// authorizeResolver throws if the user may not read the stream.
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
return payload.streamId === variables.streamId
}
)
},
branchUpdated: {
subscribe: filteredSubscribe(
BranchPubsubEvents.BranchUpdated,
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
// Stream must match; a branchId argument additionally narrows
// delivery to that single branch.
const streamMatch = payload.streamId === variables.streamId
if (streamMatch && variables.branchId) {
return payload.branchId === variables.branchId
}
return streamMatch
}
)
},
branchDeleted: {
subscribe: filteredSubscribe(
BranchPubsubEvents.BranchDeleted,
// Same stream-scoped authorization + filtering as branchCreated
async (payload, variables, context) => {
await authorizeResolver(
context.userId,
payload.streamId,
Roles.Stream.Reviewer,
context.resourceAccessRules
)
return payload.streamId === variables.streamId
}
)
}
}
} as Resolvers

Просмотреть файл

@ -40,6 +40,13 @@ import {
ProjectUpdateInput,
SubscriptionStreamUpdatedArgs,
SubscriptionStreamDeletedArgs,
SubscriptionBranchCreatedArgs,
SubscriptionBranchUpdatedArgs,
BranchUpdateInput,
UpdateModelInput,
SubscriptionBranchDeletedArgs,
BranchDeleteInput,
DeleteModelInput,
SubscriptionCommitCreatedArgs,
CommitCreateInput,
SubscriptionCommitUpdatedArgs,
@ -59,6 +66,7 @@ import {
} from '@/modules/automate/helpers/graphTypes'
import { CommentRecord } from '@/modules/comments/helpers/types'
import { CommitRecord } from '@/modules/core/helpers/types'
import { BranchRecord } from '@/modules/core/helpers/types'
/**
* GraphQL Subscription PubSub instance
@ -306,6 +314,22 @@ type SubscriptionTypeMap = {
payload: { streamDeleted: { streamId: string }; streamId: string }
variables: SubscriptionStreamDeletedArgs
}
[BranchSubscriptions.BranchCreated]: {
payload: { branchCreated: BranchRecord; streamId: string }
variables: SubscriptionBranchCreatedArgs
}
[BranchSubscriptions.BranchUpdated]: {
payload: {
branchUpdated: BranchUpdateInput | UpdateModelInput
streamId: string
branchId: string
}
variables: SubscriptionBranchUpdatedArgs
}
[BranchSubscriptions.BranchDeleted]: {
payload: { branchDeleted: BranchDeleteInput | DeleteModelInput; streamId: string }
variables: SubscriptionBranchDeletedArgs
}
[CommitSubscriptions.CommitCreated]: {
payload: {
commitCreated: CommitCreateInput & { id: string; authorId: string }
@ -334,6 +358,7 @@ type SubscriptionEvent =
| StreamSubscriptions
| UserSubscriptions
| ViewerSubscriptions
| BranchSubscriptions
/**
* Publish a GQL subscription event