From caeee618cc0b8714e275753d2df4176cc9fd1bd2 Mon Sep 17 00:00:00 2001 From: Dimitrie Stefanescu Date: Mon, 13 Apr 2020 16:16:01 +0100 Subject: [PATCH] refactor(naming): refactored to use camelCase in table properties --- modules/core/auth/index.js | 5 --- modules/core/migrations/000-core.js | 53 +++++++++++------------ modules/core/objects/services.js | 24 +++++----- modules/core/references/services.js | 24 +++++----- modules/core/streams/services.js | 26 +++++------ modules/core/tests/000-actors.spec.js | 2 +- modules/core/tests/002-objects.spec.js | 48 ++++++++++---------- modules/core/tests/003-references.spec.js | 14 +++--- modules/core/users/services.js | 22 +++++----- modules/shared/index.js | 2 +- 10 files changed, 107 insertions(+), 113 deletions(-) delete mode 100644 modules/core/auth/index.js diff --git a/modules/core/auth/index.js b/modules/core/auth/index.js deleted file mode 100644 index ffb3f3888..000000000 --- a/modules/core/auth/index.js +++ /dev/null @@ -1,5 +0,0 @@ -'use strict' - -const auth = require( 'express' ).Router( { mergeParams: true } ) - -module.exports = auth \ No newline at end of file diff --git a/modules/core/migrations/000-core.js b/modules/core/migrations/000-core.js index 69222f577..05ba67b32 100644 --- a/modules/core/migrations/000-core.js +++ b/modules/core/migrations/000-core.js @@ -8,26 +8,26 @@ exports.up = async knex => { await knex.schema.createTable( 'users', table => { table.string( 'id', 10 ).primary( ) table.string( 'username', 20 ).unique( ).notNullable( ) - table.timestamp( 'created_at' ).defaultTo( knex.fn.now( ) ) + table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) table.string( 'name' ).notNullable( ) table.string( 'email' ).unique( ) table.jsonb( 'profiles' ) - table.text( 'password_digest' ) // bcrypted pwd + table.text( 'passwordDigest' ) // bcrypted pwd table.bool( 'verified' ).defaultTo( false ) } ) // Api tokens. TODO: add moar comments await knex.schema.createTable( 'api_tokens', table => { table.string( 'id', 10 ).primary( ) - table.string( 'token_digest' ).unique( ) - table.string( 'owner_id', 10 ).references( 'id' ).inTable( 'users' ).notNullable( ) + table.string( 'tokenDigest' ).unique( ) + table.string( 'owner', 10 ).references( 'id' ).inTable( 'users' ).notNullable( ) table.string( 'name' ) - table.string( 'last_chars', 6 ) + table.string( 'lastChars', 6 ) table.specificType( 'scopes', 'text[]' ) table.boolean( 'revoked' ).defaultTo( false ) table.bigint( 'lifespan' ).defaultTo( 3.154e+12 ) // defaults to a lifespan of 100 years - table.timestamp( 'created_at' ).defaultTo( knex.fn.now( ) ) - table.timestamp( 'last_used' ).defaultTo( knex.fn.now( ) ) + table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) + table.timestamp( 'lastUsed' ).defaultTo( knex.fn.now( ) ) } ) // Streams Table @@ -36,10 +36,9 @@ exports.up = async knex => { table.string( 'name' ) table.text( 'description' ) table.boolean( 'isPublic' ).defaultTo( true ) - table.string( 'cloned_from', 10 ).references( 'id' ).inTable( 'streams' ) - table.timestamp( 'created_at' ).defaultTo( knex.fn.now( ) ) - table.timestamp( 'updated_at' ).defaultTo( knex.fn.now( ) ) - // table.unique( [ 'owner_id', 'name' ] ) + table.string( 'clonedFrom', 10 ).references( 'id' ).inTable( 'streams' ) + table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) + table.timestamp( 'updatedAt' ).defaultTo( knex.fn.now( ) ) } ) // creates an enum type for stream acl roles. @@ -54,21 +53,21 @@ exports.up = async knex => { // Stream-users access control list. 
await knex.schema.createTable( 'stream_acl', table => { - table.string( 'user_id', 10 ).references( 'id' ).inTable( 'users' ).notNullable( ).onDelete( 'cascade' ) - table.string( 'resource_id', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) - table.primary( [ 'user_id', 'resource_id' ] ) - table.unique( [ 'user_id', 'resource_id' ] ) + table.string( 'userId', 10 ).references( 'id' ).inTable( 'users' ).notNullable( ).onDelete( 'cascade' ) + table.string( 'resourceId', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) + table.primary( [ 'userId', 'resourceId' ] ) + table.unique( [ 'userId', 'resourceId' ] ) table.specificType( 'role', 'speckle_acl_role_type' ).defaultTo( 'write' ) } ) // Objects Table await knex.schema.createTable( 'objects', table => { - table.string( 'hash' ).primary( ) + table.string( 'id' ).primary( ) table.string( 'speckle_type' ).defaultTo( 'Base' ).notNullable( ) table.string( 'applicationId' ) table.jsonb( 'data' ) table.string( 'author', 10 ).references( 'id' ).inTable( 'users' ) - table.timestamp( 'created_at' ).defaultTo( knex.fn.now( ) ) + table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) table.index( [ 'speckle_type' ], 'type_index' ) } ) @@ -94,32 +93,32 @@ exports.up = async knex => { // Reference table. A reference can be a branch or a tag. await knex.schema.createTable( 'references', table => { table.string( 'id', 10 ).primary( ) - table.string( 'stream_id', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) + table.string( 'streamId', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) table.string( 'author', 10 ).references( 'id' ).inTable( 'users' ) table.string( 'name' ) table.specificType( 'type', 'speckle_reference_type' ).defaultTo( 'branch' ) table.text( 'description' ) // (Sparse) Only populated for tags, which hold one commit. - table.string( 'commit_id' ).references( 'hash' ).inTable( 'objects' ) - table.timestamp( 'created_at' ).defaultTo( knex.fn.now( ) ) + table.string( 'commitId' ).references( 'id' ).inTable( 'objects' ) + table.timestamp( 'createdAt' ).defaultTo( knex.fn.now( ) ) table.timestamp( 'updatedAt' ).defaultTo( knex.fn.now( ) ) - table.unique( [ 'stream_id', 'name' ] ) + table.unique( [ 'streamId', 'name' ] ) } ) // Junction Table Branches >- -< Commits // Note: Branches >- -< Commits is a many-to-many relationship (one commit can belong to multiple branches, one branch can have multiple commits) await knex.schema.createTable( 'branch_commits', table => { - table.string( 'branch_id', 10 ).references( 'id' ).inTable( 'references' ).notNullable( ).onDelete( 'cascade' ) - table.string( 'commit_id' ).references( 'hash' ).inTable( 'objects' ).notNullable( ) - table.primary( [ 'branch_id', 'commit_id' ] ) + table.string( 'branchId', 10 ).references( 'id' ).inTable( 'references' ).notNullable( ).onDelete( 'cascade' ) + table.string( 'commitId' ).references( 'id' ).inTable( 'objects' ).notNullable( ) + table.primary( [ 'branchId', 'commitId' ] ) } ) // Flat table to store all commits to this stream, regardless of branch. // Optional, might be removed as you can get all the commits from each branch... 
await knex.schema.createTable( 'stream_commits', table => { - table.string( 'stream_id', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) - table.string( 'commit_id' ).references( 'hash' ).inTable( 'objects' ).notNullable( ) - table.primary( [ 'stream_id', 'commit_id' ] ) + table.string( 'streamId', 10 ).references( 'id' ).inTable( 'streams' ).notNullable( ).onDelete( 'cascade' ) + table.string( 'commitId' ).references( 'id' ).inTable( 'objects' ).notNullable( ) + table.primary( [ 'streamId', 'commitId' ] ) } ) } diff --git a/modules/core/objects/services.js b/modules/core/objects/services.js index 9c012a944..8e0385056 100644 --- a/modules/core/objects/services.js +++ b/modules/core/objects/services.js @@ -24,16 +24,16 @@ module.exports = { object.speckle_type = 'commit' object.author = userId - let hash = await module.exports.createObject( object ) + let id = await module.exports.createObject( object ) - let query = StreamCommits( ).insert( { stream_id: streamId, commit_id: hash } ).toString( ) + ' on conflict do nothing' + let query = StreamCommits( ).insert( { streamId: streamId, commitId: id } ).toString( ) + ' on conflict do nothing' await knex.raw( query ) - return hash + return id }, async getCommits( streamId ) { - let commits = await StreamCommits( ).where( { stream_id: streamId } ).rightOuterJoin( 'objects', { 'objects.hash': 'stream_commits.commit_id' } ).select( 'data' ) + let commits = await StreamCommits( ).where( { streamId: streamId } ).rightOuterJoin( 'objects', { 'objects.id': 'stream_commits.commitId' } ).select( 'data' ) return commits.map( o => o.data ) }, @@ -56,7 +56,7 @@ module.exports = { await knex.raw( q2 ) } - return insertionObject.hash + return insertionObject.id }, async createObjects( objects ) { @@ -70,7 +70,7 @@ module.exports = { batches.push( objects ) } - let hashes = [ ] + let ids = [ ] let promises = batches.map( async ( batch, index ) => new Promise( async ( resolve, reject ) => { let objTreeRefs = [ ] @@ -89,7 +89,7 @@ module.exports = { let insertionObject = prepInsertionObject( obj ) objsToInsert.push( insertionObject ) - hashes.push( insertionObject.hash ) + ids.push( insertionObject.id ) } ) let queryObjs = Objects( ).insert( objsToInsert ).toString( ) + ' on conflict do nothing' @@ -107,16 +107,16 @@ module.exports = { await Promise.all( promises ) - return hashes + return ids }, async getObject( objectId ) { - let { data } = await Objects( ).where( { hash: objectId } ).select( 'data' ).first( ) + let { data } = await Objects( ).where( { id: objectId } ).select( 'data' ).first( ) return data }, async getObjects( objectIds ) { - let res = await Objects( ).whereIn( 'hash', objectIds ).select( 'data' ) + let res = await Objects( ).whereIn( 'id', objectIds ).select( 'data' ) return res.map( r => r.data ) }, @@ -136,12 +136,12 @@ module.exports = { // limitations when doing upserts - ignored fields are not always returned, hence // we cannot provide a full response back including all object hashes. 
function prepInsertionObject( obj ) { - obj.hash = obj.hash || crypto.createHash( 'md5' ).update( JSON.stringify( obj ) ).digest( 'hex' ) // generate a hash if none is present + obj.id = obj.id || crypto.createHash( 'md5' ).update( JSON.stringify( obj ) ).digest( 'hex' ) // generate a hash if none is present delete obj.__tree let stringifiedObj = JSON.stringify( obj ) return { data: stringifiedObj, // stored in jsonb column - hash: obj.hash, + id: obj.id, applicationId: obj.applicationId, speckle_type: obj.speckle_type } diff --git a/modules/core/references/services.js b/modules/core/references/services.js index ca77b33e4..6f508293f 100644 --- a/modules/core/references/services.js +++ b/modules/core/references/services.js @@ -16,7 +16,7 @@ module.exports = { async createTag( tag, streamId, userId ) { delete tag.commits // let's make sure tag.id = crs( { length: 10 } ) - tag.stream_id = streamId + tag.streamId = streamId tag.author = userId tag.type = 'tag' let [ id ] = await Refs( ).returning( 'id' ).insert( tag ) @@ -39,7 +39,7 @@ module.exports = { }, async getTagsByStreamId( streamId ) { - return Refs( ).where( { stream_id: streamId, type: 'tag' } ).select( '*' ) + return Refs( ).where( { streamId: streamId, type: 'tag' } ).select( '*' ) }, /* @@ -48,16 +48,16 @@ module.exports = { async createBranch( branch, streamId, userId ) { let commits = branch.commits || [ ] delete branch.commits - delete branch.commit_id + delete branch.commitId branch.id = crs( { length: 10 } ) - branch.stream_id = streamId + branch.streamId = streamId branch.author = userId branch.type = 'branch' let [ id ] = await Refs( ).returning( 'id' ).insert( branch ) if ( commits.length !== 0 ) { - let branchCommits = commits.map( commitId => { return { branch_id: id, commit_id: commitId } } ) + let branchCommits = commits.map( commitId => { return { branchId: id, commitId: commitId } } ) await knex.raw( BranchCommits( ).insert( branchCommits ) + ' on conflict do nothing' ) } return branch.id @@ -66,10 +66,10 @@ module.exports = { async updateBranch( branch ) { let commits = branch.commits || [ ] delete branch.commits - delete branch.commit_id + delete branch.commitId if ( commits.length !== 0 ) { - let branchCommits = commits.map( commitId => { return { branch_id: branch.id, commit_id: commitId } } ) + let branchCommits = commits.map( commitId => { return { branchId: branch.id, commitId: commitId } } ) await knex.raw( BranchCommits( ).insert( branchCommits ) + ' on conflict do nothing' ) } @@ -77,19 +77,19 @@ module.exports = { }, async getBranchCommits( branchId ) { - return BranchCommits( ).where( { branch_id: branchId } ).select( 'commit_id' ) + return BranchCommits( ).where( { branchId: branchId } ).select( 'commitId' ) }, async getBranchById( branchId ) { let branch = await Refs( ).where( { id: branchId, type: 'branch' } ).first( ).select( '*' ) - let commits = await BranchCommits( ).where( { branch_id: branchId } ) - branch.commits = commits.map( c => c.commit_id ) + let commits = await BranchCommits( ).where( { branchId: branchId } ) + branch.commits = commits.map( c => c.commitId ) return branch }, async getBranchesByStreamId( streamId ) { - return Refs( ).where( { stream_id: streamId, type: 'branch' } ).select( '*' ) + return Refs( ).where( { streamId: streamId, type: 'branch' } ).select( '*' ) }, async deleteBranchById( branchId ) { @@ -99,7 +99,7 @@ module.exports = { Generic */ async getStreamReferences( streamId ) { - return Refs( ).where( { stream_id: streamId } ).select( '*' ) + return Refs( ).where( { 
streamId: streamId } ).select( '*' ) } } \ No newline at end of file diff --git a/modules/core/streams/services.js b/modules/core/streams/services.js index 1ff1d9e29..8126ce474 100644 --- a/modules/core/streams/services.js +++ b/modules/core/streams/services.js @@ -9,12 +9,12 @@ const Acl = ( ) => knex( 'stream_acl' ) module.exports = { async createStream( stream, ownerId ) { - delete stream.created_at - stream.updated_at = knex.fn.now( ) + delete stream.createdAt + stream.updatedAt = knex.fn.now( ) stream.id = crs( { length: 10 } ) let [ res ] = await Streams( ).returning( 'id' ).insert( stream ) - await Acl( ).insert( { user_id: ownerId, resource_id: res, role: 'owner' } ) + await Acl( ).insert( { userId: ownerId, resourceId: res, role: 'owner' } ) return res }, @@ -24,26 +24,26 @@ module.exports = { }, async updateStream( stream ) { - delete stream.created_at + delete stream.createdAt let [ res ] = await Streams( ).returning( 'id' ).where( { id: stream.id } ).update( stream ) return res }, async grantPermissionsStream( streamId, userId, role ) { if ( role === 'owner' ) { - let [ ownerAcl ] = await Acl( ).where( { resource_id: streamId, role: 'owner' } ).returning( '*' ).del( ) - await Acl( ).insert( { resource_id: streamId, user_id: ownerAcl.user_id, role: 'write' } ) + let [ ownerAcl ] = await Acl( ).where( { resourceId: streamId, role: 'owner' } ).returning( '*' ).del( ) + await Acl( ).insert( { resourceId: streamId, userId: ownerAcl.userId, role: 'write' } ) } // upsert - let query = Acl( ).insert( { user_id: userId, resource_id: streamId, role: role } ).toString( ) + ` on conflict on constraint stream_acl_pkey do update set role=excluded.role` + let query = Acl( ).insert( { userId: userId, resourceId: streamId, role: role } ).toString( ) + ` on conflict on constraint stream_acl_pkey do update set role=excluded.role` await knex.raw( query ) }, async revokePermissionsStream( streamId, userId ) { - let streamAclEntries = Acl( ).where( { resource_id: streamId } ).select( '*' ) - let delCount = await Acl( ).where( { resource_id: streamId, user_id: userId } ).whereNot( { role: 'owner' } ).del( ) + let streamAclEntries = Acl( ).where( { resourceId: streamId } ).select( '*' ) + let delCount = await Acl( ).where( { resourceId: streamId, userId: userId } ).whereNot( { role: 'owner' } ).del( ) if ( delCount === 0 ) throw new Error( 'Could not revoke permissions for user. Is he an owner?' 
) }, @@ -64,14 +64,14 @@ module.exports = { offset = offset || 0 limit = limit || 100 - return Acl( ).where( { user_id: userId } ) - .rightJoin( 'streams', { 'streams.id': 'stream_acl.resource_id' } ) + return Acl( ).where( { userId: userId } ) + .rightJoin( 'streams', { 'streams.id': 'stream_acl.resourceId' } ) .limit( limit ).offset( offset ) }, async getStreamUsers( streamId ) { - return Acl( ).where( { resource_id: streamId } ) - .rightJoin( 'users', { 'users.id': 'stream_acl.user_id' } ) + return Acl( ).where( { resourceId: streamId } ) + .rightJoin( 'users', { 'users.id': 'stream_acl.userId' } ) .select( 'role', 'username', 'name', 'id' ) } } \ No newline at end of file diff --git a/modules/core/tests/000-actors.spec.js b/modules/core/tests/000-actors.spec.js index 128a94f86..bade538c1 100644 --- a/modules/core/tests/000-actors.spec.js +++ b/modules/core/tests/000-actors.spec.js @@ -52,7 +52,7 @@ describe( 'Actors & Tokens', ( ) => { it( 'Should get an actor', async ( ) => { let actor = await getUser( myTestActor.id ) - expect( actor ).to.not.have.property( 'password_digest' ) + expect( actor ).to.not.have.property( 'passwordDigest' ) } ) it( 'Should update an actor', async ( ) => { diff --git a/modules/core/tests/002-objects.spec.js b/modules/core/tests/002-objects.spec.js index b2c38ad0f..b03f131eb 100644 --- a/modules/core/tests/002-objects.spec.js +++ b/modules/core/tests/002-objects.spec.js @@ -26,7 +26,7 @@ let sampleCommit = JSON.parse( `{ "beb6c53c4e531f4c259a59e943dd3043" ], "CreatedOn": "2020-03-18T12:06:07.82307Z", - "hash": "79eb41764cc2c065de752bd704bfc4aa", + "id": "79eb41764cc2c065de752bd704bfc4aa", "speckle_type": "Speckle.Core.Commit", "__tree": [ "79eb41764cc2c065de752bd704bfc4aa.8a9b0676b7fe3e5e487bb34549e67f67" @@ -35,7 +35,7 @@ let sampleCommit = JSON.parse( `{ let sampleObject = JSON.parse( `{ "Vertices": [], - "hash": "8a9b0676b7fe3e5e487bb34549e67f67", + "id": "8a9b0676b7fe3e5e487bb34549e67f67", "applicationId": "test", "speckle_type": "Tests.Polyline" }` ) @@ -73,13 +73,13 @@ describe( 'Objects', ( ) => { } ) it( 'Should create a commit', async ( ) => { - let myHash = await createCommit( stream.id, userOne.id, sampleCommit ) - expect( myHash ).to.not.be.null + let myId = await createCommit( stream.id, userOne.id, sampleCommit ) + expect( myId ).to.not.be.null } ) it( 'Should create objects', async ( ) => { - sampleObject.hash = await createObject( sampleObject ) - sampleCommit.hash = await createObject( sampleCommit ) + sampleObject.id = await createObject( sampleObject ) + sampleCommit.id = await createObject( sampleCommit ) } ) let objCount_1 = 10 @@ -100,9 +100,9 @@ describe( 'Objects', ( ) => { } ) } - let hashes = await createObjects( objs ) + let ids = await createObjects( objs ) - expect( hashes ).to.have.lengthOf( objCount_1 ) + expect( ids ).to.have.lengthOf( objCount_1 ) } ).timeout( 30000 ) @@ -124,31 +124,31 @@ describe( 'Objects', ( ) => { } ) } - let hashes = await createObjects( objs2 ) + let myIds = await createObjects( objs2 ) - hashes.forEach( ( h, i ) => objs2[ i ].hash = h ) + myIds.forEach( ( h, i ) => objs2[ i ].id = h ) - expect( hashes ).to.have.lengthOf( objCount_2 ) + expect( myIds ).to.have.lengthOf( objCount_2 ) } ).timeout( 30000 ) it( 'Should get a single object', async ( ) => { - let obj = await getObject( sampleCommit.hash ) + let obj = await getObject( sampleCommit.id ) expect( obj ).to.deep.equal( sampleCommit ) } ) it( 'Should get more objects', async ( ) => { - let myObjs = await getObjects( objs.map( o => o.hash ) ) + let 
myObjs = await getObjects( objs.map( o => o.id ) ) expect( myObjs ).to.have.lengthOf( objs.length ) - let match1 = myObjs.find( o => o.hash === objs[ 0 ].hash ) + let match1 = myObjs.find( o => o.id === objs[ 0 ].id ) expect( match1 ).to.not.be.null - expect( match1.hash ).to.equal( objs[ 0 ].hash ) + expect( match1.id ).to.equal( objs[ 0 ].id ) - let match2 = myObjs.find( o => o.hash === objs[ 2 ].hash ) + let match2 = myObjs.find( o => o.id === objs[ 2 ].id ) expect( match2 ).to.not.be.null - expect( match2.hash ).to.equal( objs[ 2 ].hash ) + expect( match2.id ).to.equal( objs[ 2 ].id ) } ) } ) @@ -176,7 +176,7 @@ describe( 'Objects', ( ) => { let secondCommit = { ...sampleCommit } secondCommit.description = "Something else" - delete secondCommit.hash + delete secondCommit.id const secondCommitRes = await chai.request( app ).post( `${baseUrl}/commits` ).send( secondCommit ).set( 'Authorization', `Bearer ${tokenA}` ) @@ -189,7 +189,7 @@ describe( 'Objects', ( ) => { expect( commits ).to.have.status( 200 ) expect( commits.body ).to.have.lengthOf( 2 ) - expect( commits.body[ 0 ] ).to.have.property( 'hash' ) + expect( commits.body[ 0 ] ).to.have.property( 'id' ) expect( commits.body[ 0 ] ).to.have.property( 'speckle_type' ) expect( commits.body[ 0 ].speckle_type ).to.equal( 'commit' ) } ) @@ -210,23 +210,23 @@ describe( 'Objects', ( ) => { expect( objectCreationResult ).to.have.status( 201 ) expect( objectCreationResult.body ).to.have.lengthOf( objCount ) - objs.forEach( ( o, i ) => o.hash = objectCreationResult.body[ i ] ) + objs.forEach( ( o, i ) => o.id = objectCreationResult.body[ i ] ) } ) it( 'Should get 10 objects', async ( ) => { - const url = `${baseUrl}/objects/${objs.slice(0,10).map( o => o.hash ).join( )}` + const url = `${baseUrl}/objects/${objs.slice(0,10).map( o => o.id ).join( )}` const objsResult = await chai.request( app ).get( url ).set( 'Authorization', `Bearer ${tokenA}` ) expect( objsResult ).to.have.status( 200 ) expect( objsResult.body ).to.have.lengthOf( 10 ) - expect( objsResult.body[ 0 ] ).to.have.property( 'hash' ) + expect( objsResult.body[ 0 ] ).to.have.property( 'id' ) } ) it( 'Should get many objects', async ( ) => { - const objsResult = await chai.request( app ).post( `${baseUrl}/objects/getmany` ).send( objs.map( o => o.hash ) ).set( 'Authorization', `Bearer ${tokenA}` ) + const objsResult = await chai.request( app ).post( `${baseUrl}/objects/getmany` ).send( objs.map( o => o.id ) ).set( 'Authorization', `Bearer ${tokenA}` ) expect( objsResult ).to.have.status( 200 ) expect( objsResult.body ).to.have.lengthOf( objCount ) - expect( objsResult.body[ 0 ] ).to.have.property( 'hash' ) + expect( objsResult.body[ 0 ] ).to.have.property( 'id' ) } ) } ) diff --git a/modules/core/tests/003-references.spec.js b/modules/core/tests/003-references.spec.js index 8df64fda3..889ed2709 100644 --- a/modules/core/tests/003-references.spec.js +++ b/modules/core/tests/003-references.spec.js @@ -64,7 +64,7 @@ describe( 'Tags & Branches', ( ) => { commit2.parents = [ commit1.hash ] commit2.hash = await createCommit( stream.id, user.id, commit2 ) - tag.commit_id = commit2.hash + tag.commitId = commit2.hash } ) after( async ( ) => { @@ -90,9 +90,9 @@ describe( 'Tags & Branches', ( ) => { it( 'Should get a branch', async ( ) => { let myBranch = await getBranchById( branch.id ) - delete myBranch.created_at // delete minor stuffs + delete myBranch.createdAt // delete minor stuffs delete myBranch.updatedAt - delete myBranch.commit_id + delete myBranch.commitId delete myBranch.commits 
expect( myBranch ).to.deep.equal( branch ) @@ -136,7 +136,7 @@ describe( 'Tags & Branches', ( ) => { it( 'Should get a tag', async ( ) => { let myTag = await getTagById( tag.id ) - delete myTag.created_at + delete myTag.createdAt delete myTag.updatedAt expect( myTag ).to.deep.equal( tag ) } ) @@ -158,8 +158,8 @@ describe( 'Tags & Branches', ( ) => { } ) it( 'Should get all stream tags', async ( ) => { - await createTag( { name: 'v3.0.0', commit_id: commit2.hash }, stream.id, user.id ) - await createTag( { name: 'v4.0.0', commit_id: commit1.hash }, stream.id, user.id ) + await createTag( { name: 'v3.0.0', commitId: commit2.hash }, stream.id, user.id ) + await createTag( { name: 'v4.0.0', commitId: commit1.hash }, stream.id, user.id ) let tags = await getTagsByStreamId( stream.id ) expect( tags ).to.have.lengthOf( 4 ) } ) @@ -186,7 +186,7 @@ describe( 'Tags & Branches', ( ) => { commit1.hash = await createCommit( stream.id, user.id, commit1 ) commit2.parents = [ commit1.hash ] commit2.hash = await createCommit( stream.id, user.id, commit2 ) - tag.commit_id = commit2.hash + tag.commitId = commit2.hash } ) after( async ( ) => { diff --git a/modules/core/users/services.js b/modules/core/users/services.js index 9b43723f2..4cccdb5c0 100644 --- a/modules/core/users/services.js +++ b/modules/core/users/services.js @@ -19,7 +19,7 @@ module.exports = { user.id = crs( { length: 10 } ) if ( user.password ) { - user.password_digest = await bcrypt.hash( user.password, 10 ) + user.passwordDigest = await bcrypt.hash( user.password, 10 ) delete user.password } @@ -34,15 +34,15 @@ module.exports = { async updateUser( id, user ) { delete user.id - delete user.password_digest + delete user.passwordDigest delete user.password delete user.email await Users( ).where( { id: id } ).update( user ) }, async validatePasssword( userId, password ) { - var { password_digest } = await Users( ).where( { id: userId } ).select( 'password_digest' ).first( ) - return bcrypt.compare( password, password_digest ) + var { passwordDigest } = await Users( ).where( { id: userId } ).select( 'passwordDigest' ).first( ) + return bcrypt.compare( password, passwordDigest ) }, async deleteUser( id ) { @@ -62,9 +62,9 @@ module.exports = { let tokenString = crs( { length: 32 } ) let tokenHash = await bcrypt.hash( tokenString, 10 ) - let last_chars = tokenString.slice( tokenString.length - 6, tokenString.length ) + let lastChars = tokenString.slice( tokenString.length - 6, tokenString.length ) - let res = await Keys( ).returning( 'id' ).insert( { id: tokenId, token_digest: tokenHash, last_chars: last_chars, owner_id: userId, name: name, scopes: scopes, lifespan: lifespan } ) + let res = await Keys( ).returning( 'id' ).insert( { id: tokenId, tokenDigest: tokenHash, lastChars: lastChars, owner: userId, name: name, scopes: scopes, lifespan: lifespan } ) return tokenId + tokenString }, @@ -79,17 +79,17 @@ module.exports = { return { valid: false } } - const timeDiff = Math.abs( Date.now( ) - new Date( token.created_at ) ) + const timeDiff = Math.abs( Date.now( ) - new Date( token.createdAt ) ) if ( timeDiff > token.lifespan ) { await module.exports.revokeToken( tokenId ) return { valid: false } } - let valid = bcrypt.compare( tokenContent, token.token_digest ) + let valid = bcrypt.compare( tokenContent, token.tokenDigest ) if ( valid ) { - await Keys( ).where( { id: tokenId } ).update( { last_used: knex.fn.now( ) } ) - return { valid: true, userId: token.owner_id, scopes: token.scopes } + await Keys( ).where( { id: tokenId } ).update( { lastUsed: 
knex.fn.now( ) } ) + return { valid: true, userId: token.owner, scopes: token.scopes } } else return { valid: false } }, @@ -100,6 +100,6 @@ module.exports = { }, async getUserTokens( userId ) { - return Keys( ).where( { owner_id: userId } ).select( 'id', 'name', 'last_chars', 'scopes', 'created_at', 'last_used' ) + return Keys( ).where( { owner: userId } ).select( 'id', 'name', 'lastChars', 'scopes', 'createdAt', 'lastUsed' ) } } \ No newline at end of file diff --git a/modules/shared/index.js b/modules/shared/index.js index cb5700caf..0b02d2ac0 100644 --- a/modules/shared/index.js +++ b/modules/shared/index.js @@ -69,7 +69,7 @@ function authorize( aclTable, resourceTable, requiredRole ) { if ( !req.user ) return res.status( 401 ).send( { error: 'Unauthorized' } ) - let [ entry ] = await ACL( ).where( { resource_id: req.params.resourceId, user_id: req.user.id } ).select( '*' ) + let [ entry ] = await ACL( ).where( { resourceId: req.params.resourceId, userId: req.user.id } ).select( '*' ) if ( !entry ) { return res.status( 401 ).send( { error: 'Unauthorized' } )
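
Illustrative sketch, not part of the patch: how a consumer would read and upsert the renamed stream_acl columns after this migration. It mirrors the raw "on conflict" upsert pattern used by the services above. The knex connection setup and the helper names getUserRole / setUserRole are placeholders for illustration only.

// Sketch only — assumes the post-migration schema above; connection details are placeholders.
const knex = require( 'knex' )( { client: 'pg', connection: process.env.POSTGRES_URL } )

const Acl = ( ) => knex( 'stream_acl' )

// Read a user's role on a stream using the camelCase columns (userId / resourceId).
async function getUserRole( userId, streamId ) {
  let entry = await Acl( ).where( { userId: userId, resourceId: streamId } ).first( )
  return entry ? entry.role : null
}

// Upsert a role the same way the services do: stringify the insert and append a raw conflict clause.
async function setUserRole( userId, streamId, role ) {
  let query = Acl( ).insert( { userId: userId, resourceId: streamId, role: role } ).toString( ) + ' on conflict on constraint stream_acl_pkey do update set role=excluded.role'
  await knex.raw( query )
}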