Separate functional tests from E2E tests (#762)

This commit is contained in:
Elizabeth Craig 2022-08-22 10:56:45 -07:00 committed by GitHub
Parent af71f9cdfe
Commit f1e376e1b9
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
21 changed files: 713 additions and 547 deletions

2
.github/workflows/pr.yml (vendored)

@@ -52,4 +52,6 @@ jobs:
- run: yarn test:unit
- run: yarn test:func
- run: yarn test:e2e

24
.vscode/launch.json (vendored)

@@ -8,10 +8,22 @@
"type": "node",
"request": "launch",
"name": "Debug Unit Jest Tests",
"program": "${workspaceRoot}/node_modules/jest/bin/jest.js",
"runtimeExecutable": "npm",
"cwd": "${workspaceFolder}",
"runtimeArgs": ["--nolazy", "--inspect"],
"args": ["--runInBand", "--watch", "--config", "${workspaceRoot}/jest.config.js", "${fileBasenameNoExtension}"],
"runtimeArgs": ["run-script", "test:unit"],
"args": ["--", "--runInBand", "--watch", "${fileBasenameNoExtension}"],
"sourceMaps": true,
"outputCapture": "std",
"console": "integratedTerminal"
},
{
"type": "node",
"request": "launch",
"name": "Debug Functional Jest Tests",
"runtimeExecutable": "npm",
"cwd": "${workspaceFolder}",
"runtimeArgs": ["run-script", "test:func"],
"args": ["--", "--runInBand", "--watch", "${fileBasenameNoExtension}"],
"sourceMaps": true,
"outputCapture": "std",
"console": "integratedTerminal"
@@ -20,10 +32,10 @@
"type": "node",
"request": "launch",
"name": "Debug E2E Jest Tests",
"program": "${workspaceRoot}/node_modules/jest/bin/jest.js",
"runtimeExecutable": "npm",
"cwd": "${workspaceFolder}",
"runtimeArgs": ["--nolazy", "--inspect"],
"args": ["--runInBand", "--watch", "--config", "${workspaceRoot}/jest.e2e.js", "${fileBasenameNoExtension}"],
"runtimeArgs": ["run-script", "test:e2e"],
"args": ["--", "--runInBand", "--watch", "${fileBasenameNoExtension}"],
"sourceMaps": true,
"outputCapture": "std",
"console": "integratedTerminal"


@@ -10,9 +10,7 @@ module.exports = {
'docs/**',
'jest.*.js',
'renovate.json5',
'src/__e2e__/**',
'src/__fixtures__/**',
'src/__tests__/**',
'src/__*/**',
// This one is especially important (otherwise dependabot would be blocked by change file requirements)
'yarn.lock',
],


@@ -4,6 +4,6 @@ module.exports = {
'^.+\\.tsx?$': 'ts-jest',
},
testEnvironment: 'node',
testRegex: '/__tests__/.*\\.(test|spec)\\.ts$',
testTimeout: 60000,
testMatch: ['**/__tests__/**/*.test.ts'],
};


@@ -1,4 +1,4 @@
module.exports = {
...require('./jest.config'),
testRegex: '/__e2e__/.*\\.(test|spec)\\.ts$',
testMatch: ['**/__e2e__/**/*.test.ts'],
};

4
jest.func.js Normal file

@@ -0,0 +1,4 @@
module.exports = {
...require('./jest.config'),
testMatch: ['**/__functional__/**/*.test.ts'],
};


@@ -31,11 +31,12 @@
"release": "node ./lib/cli.js publish -y",
"release:docs": "yarn docs:build && yarn gh-pages -d docs/.vuepress/dist --dotfiles",
"start": "tsc -w --preserveWatchOutput",
"test": "yarn test:unit && yarn test:e2e",
"test": "yarn test:unit && yarn test:func && yarn test:e2e",
"test:e2e": "jest --config jest.e2e.js",
"test:unit": "jest --config jest.config.js",
"test:func": "jest --config jest.func.js",
"test:unit": "jest",
"test:watch": "jest --watch",
"update-snapshots": "yarn test:unit -u && yarn test:e2e -u"
"update-snapshots": "yarn test:unit -u && yarn test:func -u && yarn test:e2e -u"
},
"dependencies": {
"cosmiconfig": "^7.0.0",

1
src/__e2e__/README.md Normal file

@@ -0,0 +1 @@
These tests are true E2E tests covering major beachball scenarios. Tests that cover specific helper functions but need filesystem-based fixtures should go in `__functional__` instead.
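As a rough illustration of the split, an E2E test here exercises a whole command flow against a cloned fixture repo. The sketch below only borrows fixture helpers that appear elsewhere in this commit (`RepositoryFactory`, `initMockLogs`, `generateChangeFiles`); the command entry point itself is left as a comment because it is not shown in this diff, so treat this purely as the shape of such a test.

import { generateChangeFiles } from '../__fixtures__/changeFiles';
import { initMockLogs } from '../__fixtures__/mockLogs';
import { RepositoryFactory } from '../__fixtures__/repositoryFactory';

describe('publish scenario (illustrative shape of an E2E test)', () => {
  let repositoryFactory: RepositoryFactory;
  initMockLogs();

  beforeAll(() => {
    repositoryFactory = new RepositoryFactory('monorepo');
  });

  afterAll(() => {
    repositoryFactory.cleanUp();
  });

  it('runs a full beachball scenario against a cloned repo', () => {
    const repo = repositoryFactory.cloneRepository();
    generateChangeFiles(['foo'], repo.rootPath);
    // ...run the command under test here (e.g. the bump/publish flow), then
    // assert on the resulting versions, changelogs, tags, and pushed commits.
    // The exact entry point and assertions depend on the scenario being covered.
  });
});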


@@ -1,378 +0,0 @@
import _ from 'lodash';
import { generateChangeFiles } from '../__fixtures__/changeFiles';
import { cleanChangelogJson, readChangelogJson, readChangelogMd } from '../__fixtures__/changelog';
import { initMockLogs } from '../__fixtures__/mockLogs';
import { RepositoryFactory } from '../__fixtures__/repositoryFactory';
import { writeChangelog } from '../changelog/writeChangelog';
import { getPackageInfos } from '../monorepo/getPackageInfos';
import { readChangeFiles } from '../changefile/readChangeFiles';
import { BeachballOptions } from '../types/BeachballOptions';
import { ChangeFileInfo, ChangeInfo } from '../types/ChangeInfo';
function getChange(packageName: string, comment: string): ChangeFileInfo {
return {
comment,
email: 'test@testtestme.com',
packageName,
type: 'patch',
dependentChangeType: 'patch',
};
}
describe('changelog generation', () => {
let repositoryFactory: RepositoryFactory;
let monoRepoFactory: RepositoryFactory;
const logs = initMockLogs();
beforeAll(() => {
// These tests can share the same repo factories because they don't push to origin
// (the actual tests run against a clone)
repositoryFactory = new RepositoryFactory('single');
monoRepoFactory = new RepositoryFactory('monorepo');
});
afterAll(() => {
repositoryFactory.cleanUp();
monoRepoFactory.cleanUp();
});
describe('readChangeFiles', () => {
it('does not add commit hash', () => {
const repository = repositoryFactory.cloneRepository();
repository.commitChange('foo');
generateChangeFiles(['foo'], repository.rootPath);
const packageInfos = getPackageInfos(repository.rootPath);
const changeSet = readChangeFiles({ path: repository.rootPath } as BeachballOptions, packageInfos);
expect(changeSet).toHaveLength(1);
expect(changeSet[0].change.commit).toBe(undefined);
});
it('excludes invalid change files', () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.updateJsonFile('packages/bar/package.json', { private: true });
// fake doesn't exist, bar is private, foo is okay
generateChangeFiles(['fake', 'bar', 'foo'], monoRepo.rootPath);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles({ path: monoRepo.rootPath } as BeachballOptions, packageInfos);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).toHaveBeenCalledWith(expect.stringContaining('Change detected for private package bar'));
expect(logs.mocks.warn).toHaveBeenCalledWith(
expect.stringContaining('Change detected for nonexistent package fake')
);
});
it('excludes invalid changes from grouped change file', () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.updateJsonFile('packages/bar/package.json', { private: true });
// fake doesn't exist, bar is private, foo is okay
generateChangeFiles(['fake', 'bar', 'foo'], monoRepo.rootPath, true /*groupChanges*/);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, groupChanges: true } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).toHaveBeenCalledWith(expect.stringContaining('Change detected for private package bar'));
expect(logs.mocks.warn).toHaveBeenCalledWith(
expect.stringContaining('Change detected for nonexistent package fake')
);
});
it('excludes out of scope change files', () => {
const monoRepo = monoRepoFactory.cloneRepository();
generateChangeFiles(['bar', 'foo'], monoRepo.rootPath);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, scope: ['packages/foo'] } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).not.toHaveBeenCalled();
});
it('excludes out of scope changes from grouped change file', () => {
const monoRepo = monoRepoFactory.cloneRepository();
generateChangeFiles(['bar', 'foo'], monoRepo.rootPath, true /*groupChanges*/);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, scope: ['packages/foo'], groupChanges: true } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).not.toHaveBeenCalled();
});
});
describe('writeChangelog', () => {
it('generates correct changelog', async () => {
const repository = repositoryFactory.cloneRepository();
repository.commitChange('foo');
generateChangeFiles([getChange('foo', 'additional comment 2')], repository.rootPath);
generateChangeFiles([getChange('foo', 'additional comment 1')], repository.rootPath);
generateChangeFiles([getChange('foo', 'comment 1')], repository.rootPath);
repository.commitChange('bar');
generateChangeFiles([getChange('foo', 'comment 2')], repository.rootPath);
const beachballOptions = { path: repository.rootPath } as BeachballOptions;
const packageInfos = getPackageInfos(repository.rootPath);
const changes = readChangeFiles(beachballOptions, packageInfos);
await writeChangelog(beachballOptions, changes, { foo: 'patch' }, { foo: new Set(['foo']) }, packageInfos);
expect(readChangelogMd(repository.rootPath)).toMatchSnapshot('changelog md');
const changelogJson = readChangelogJson(repository.rootPath);
expect(cleanChangelogJson(changelogJson)).toMatchSnapshot('changelog json');
// Every entry should have a different commit hash
const patchComments = changelogJson.entries[0].comments.patch!;
const commits = patchComments.map(entry => entry.commit);
expect(new Set(commits).size).toEqual(patchComments.length);
// The first entry should be the newest
expect(patchComments[0].commit).toBe(repository.getCurrentHash());
});
it('generates correct changelog in monorepo with groupChanges (grouped change FILES)', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
const params = [monoRepo.rootPath, true /*groupChanges*/] as const;
generateChangeFiles(
[getChange('foo', 'additional comment 2'), getChange('bar', 'comment from bar change ')],
...params
);
generateChangeFiles([getChange('foo', 'additional comment 1')], ...params);
generateChangeFiles([getChange('foo', 'comment 1')], ...params);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('foo', 'comment 2')], ...params);
const beachballOptions = { path: monoRepo.rootPath, groupChanges: true } as BeachballOptions;
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions, packageInfos);
await writeChangelog(beachballOptions, changes, { foo: 'patch', bar: 'patch' }, {}, packageInfos);
// check changelogs for both foo and bar
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot('foo CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
const fooJson = readChangelogJson(monoRepo.pathTo('packages/foo'));
expect(cleanChangelogJson(fooJson)).toMatchSnapshot('foo CHANGELOG.json');
expect(readChangelogJson(monoRepo.pathTo('packages/bar'), true /*clean*/)).toMatchSnapshot('bar CHANGELOG.json');
// Every entry should have a different commit hash
const patchComments = fooJson.entries[0].comments.patch!;
const commits = patchComments.map(entry => entry.commit);
expect(new Set(commits).size).toEqual(patchComments.length);
// The first entry should be the newest
expect(patchComments[0].commit).toBe(monoRepo.getCurrentHash());
});
it('generates correct grouped changelog', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
generateChangeFiles([getChange('bar', 'comment 3')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(beachballOptions as BeachballOptions, changes, {}, {}, packageInfos);
// Validate changelog for foo and bar packages
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot('foo CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
// Validate grouped changelog for foo and bar packages
expect(readChangelogMd(monoRepo.rootPath)).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates grouped changelog without dependent change entries', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('baz');
generateChangeFiles([getChange('baz', 'comment 1')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar', 'packages/baz'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(
beachballOptions as BeachballOptions,
changes,
{ bar: 'patch', baz: 'patch' },
{ bar: new Set(['baz']) },
packageInfos
);
// Validate changelog for bar package
const barChangelogText = readChangelogMd(monoRepo.pathTo('packages/bar'));
expect(barChangelogText).toContain('- Bump baz');
expect(barChangelogText).toMatchSnapshot('bar CHANGELOG.md');
// Validate changelog for baz package
expect(readChangelogMd(monoRepo.pathTo('packages/baz'))).toMatchSnapshot('baz CHANGELOG.md');
// Validate grouped changelog for foo master package
const groupedChangelogText = readChangelogMd(monoRepo.rootPath);
expect(groupedChangelogText).toContain('- comment 1');
expect(groupedChangelogText).not.toContain('- Bump baz');
expect(groupedChangelogText).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates grouped changelog without dependent change entries where packages have normal changes and dependency changes', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('baz');
generateChangeFiles([getChange('baz', 'comment 1')], monoRepo.rootPath);
generateChangeFiles([getChange('bar', 'comment 1')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar', 'packages/baz'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(
beachballOptions as BeachballOptions,
changes,
{ bar: 'patch', baz: 'patch' },
{ bar: new Set(['baz']) },
packageInfos
);
// Validate changelog for bar and baz packages
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/baz'))).toMatchSnapshot('baz CHANGELOG.md');
// Validate grouped changelog for foo master package
expect(readChangelogMd(monoRepo.rootPath)).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.pathTo('packages/foo'),
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(beachballOptions as BeachballOptions, changes, {}, {}, packageInfos);
// Validate changelog for bar package
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot();
// Validate grouped changelog for foo and bar packages
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot();
});
it('Verify that the changeFile transform functions are run, if provided', async () => {
const editedComment: string = 'Edited comment for testing';
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
transform: {
changeFiles: (changeFile, changeFilePath) => {
// For test, we will be changing the comment based on the package name
if ((changeFile as ChangeInfo).packageName === 'foo') {
(changeFile as ChangeInfo).comment = editedComment;
}
return changeFile as ChangeInfo;
},
},
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.pathTo('packages/foo'),
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
// Verify that the comment of only the intended change file is changed
for (const { change, changeFile } of changes) {
if (changeFile.startsWith('foo')) {
expect(change.comment).toBe(editedComment);
} else {
expect(change.comment).toBe('comment 2');
}
}
});
});
});


@@ -1,126 +0,0 @@
import fs from 'fs-extra';
import { generateChangeFiles } from '../__fixtures__/changeFiles';
import { defaultRemoteBranchName, defaultRemoteName } from '../__fixtures__/gitDefaults';
import { initMockLogs } from '../__fixtures__/mockLogs';
import { Repository } from '../__fixtures__/repository';
import { RepositoryFactory } from '../__fixtures__/repositoryFactory';
import { isChangeFileNeeded } from '../validation/isChangeFileNeeded';
import { BeachballOptions } from '../types/BeachballOptions';
import { areChangeFilesDeleted } from '../validation/areChangeFilesDeleted';
import { getChangePath } from '../paths';
import { getPackageInfos } from '../monorepo/getPackageInfos';
describe('validation', () => {
let repositoryFactory: RepositoryFactory;
initMockLogs();
beforeAll(() => {
repositoryFactory = new RepositoryFactory('single');
});
afterAll(() => {
repositoryFactory.cleanUp();
});
describe('isChangeFileNeeded', () => {
let repository: Repository;
beforeEach(() => {
repository = repositoryFactory.cloneRepository();
});
it('is false when no changes have been made', () => {
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeFalsy();
});
it('is true when changes exist in a new branch', () => {
repository.checkout('-b', 'feature-0');
repository.commitChange('myFilename');
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeTruthy();
});
it('is false when changes are CHANGELOG files', () => {
repository.checkout('-b', 'feature-0');
repository.commitChange('CHANGELOG.md');
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeFalsy();
});
it('throws if the remote is invalid', () => {
// make a separate clone due to messing with the remote
const repo = repositoryFactory.cloneRepository();
repo.git(['remote', 'set-url', defaultRemoteName, 'file:///__nonexistent']);
repo.checkout('-b', 'feature-0');
repo.commitChange('CHANGELOG.md');
expect(() => {
isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repo.rootPath,
fetch: true,
} as BeachballOptions,
getPackageInfos(repo.rootPath)
);
}).toThrow();
});
});
describe('areChangeFilesDeleted', () => {
let repository: Repository;
beforeEach(() => {
repository = repositoryFactory.cloneRepository();
generateChangeFiles(['pkg-1'], repository.rootPath);
repository.push();
});
it('is false when no change files are deleted', () => {
repository.checkout('-b', 'feature-0');
const result = areChangeFilesDeleted({
branch: defaultRemoteBranchName,
path: repository.rootPath,
} as BeachballOptions);
expect(result).toBeFalsy();
});
it('is true when change files are deleted', () => {
repository.checkout('-b', 'feature-0');
const changeDirPath = getChangePath(repository.rootPath);
fs.removeSync(changeDirPath);
repository.commitAll();
const result = areChangeFilesDeleted({
branch: defaultRemoteBranchName,
path: repository.rootPath,
} as BeachballOptions);
expect(result).toBeTruthy();
});
});
});


@@ -0,0 +1,3 @@
Tests in this folder are more like unit tests than true end-to-end tests (usually covering one helper function), but they must run against the actual filesystem and therefore must create temporary files for fixtures.
These tests are run before the E2E tests because a bug in one of these functions would likely cause many E2E tests to fail.
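A minimal sketch of a test that belongs in this folder, using only the `RepositoryFactory` fixture that appears elsewhere in this commit and assuming `getPackageInfos` returns a map keyed by package name (as its use in these tests suggests); the file location in the first comment is hypothetical. Because it matches `**/__functional__/**/*.test.ts`, it would be picked up by `jest.func.js` and run with `yarn test:func`.

// hypothetical location: src/__functional__/monorepo/getPackageInfos.test.ts
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { getPackageInfos } from '../../monorepo/getPackageInfos';

describe('getPackageInfos', () => {
  let repositoryFactory: RepositoryFactory;

  beforeAll(() => {
    // filesystem-based fixture: a real temporary monorepo is created on disk
    repositoryFactory = new RepositoryFactory('monorepo');
  });

  afterAll(() => {
    repositoryFactory.cleanUp();
  });

  it('finds the packages in the monorepo fixture', () => {
    const repo = repositoryFactory.cloneRepository();
    const packageInfos = getPackageInfos(repo.rootPath);
    // the monorepo fixture used throughout this commit contains foo and bar packages
    expect(Object.keys(packageInfos)).toEqual(expect.arrayContaining(['foo', 'bar']));
  });
});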


@@ -0,0 +1,100 @@
import _ from 'lodash';
import { generateChangeFiles } from '../../__fixtures__/changeFiles';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { getPackageInfos } from '../../monorepo/getPackageInfos';
import { readChangeFiles } from '../../changefile/readChangeFiles';
import { BeachballOptions } from '../../types/BeachballOptions';
describe('readChangeFiles', () => {
let repositoryFactory: RepositoryFactory;
let monoRepoFactory: RepositoryFactory;
const logs = initMockLogs();
beforeAll(() => {
// These tests can share the same repo factories because they don't push to origin
// (the actual tests run against a clone)
repositoryFactory = new RepositoryFactory('single');
monoRepoFactory = new RepositoryFactory('monorepo');
});
afterAll(() => {
repositoryFactory.cleanUp();
monoRepoFactory.cleanUp();
});
it('does not add commit hash', () => {
const repository = repositoryFactory.cloneRepository();
repository.commitChange('foo');
generateChangeFiles(['foo'], repository.rootPath);
const packageInfos = getPackageInfos(repository.rootPath);
const changeSet = readChangeFiles({ path: repository.rootPath } as BeachballOptions, packageInfos);
expect(changeSet).toHaveLength(1);
expect(changeSet[0].change.commit).toBe(undefined);
});
it('excludes invalid change files', () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.updateJsonFile('packages/bar/package.json', { private: true });
// fake doesn't exist, bar is private, foo is okay
generateChangeFiles(['fake', 'bar', 'foo'], monoRepo.rootPath);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles({ path: monoRepo.rootPath } as BeachballOptions, packageInfos);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).toHaveBeenCalledWith(expect.stringContaining('Change detected for private package bar'));
expect(logs.mocks.warn).toHaveBeenCalledWith(
expect.stringContaining('Change detected for nonexistent package fake')
);
});
it('excludes invalid changes from grouped change file', () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.updateJsonFile('packages/bar/package.json', { private: true });
// fake doesn't exist, bar is private, foo is okay
generateChangeFiles(['fake', 'bar', 'foo'], monoRepo.rootPath, true /*groupChanges*/);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, groupChanges: true } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).toHaveBeenCalledWith(expect.stringContaining('Change detected for private package bar'));
expect(logs.mocks.warn).toHaveBeenCalledWith(
expect.stringContaining('Change detected for nonexistent package fake')
);
});
it('excludes out of scope change files', () => {
const monoRepo = monoRepoFactory.cloneRepository();
generateChangeFiles(['bar', 'foo'], monoRepo.rootPath);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, scope: ['packages/foo'] } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).not.toHaveBeenCalled();
});
it('excludes out of scope changes from grouped change file', () => {
const monoRepo = monoRepoFactory.cloneRepository();
generateChangeFiles(['bar', 'foo'], monoRepo.rootPath, true /*groupChanges*/);
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changeSet = readChangeFiles(
{ path: monoRepo.rootPath, scope: ['packages/foo'], groupChanges: true } as BeachballOptions,
packageInfos
);
expect(changeSet).toHaveLength(1);
expect(logs.mocks.warn).not.toHaveBeenCalled();
});
});


@@ -0,0 +1,111 @@
import fs from 'fs-extra';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { ChangeFileInfo } from '../../types/ChangeInfo';
import { writeChangeFiles } from '../../changefile/writeChangeFiles';
import { getChangeFiles } from '../../__fixtures__/changeFiles';
import { listAllTrackedFiles } from 'workspace-tools';
const uuidRegex = /[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}/;
const uuidGeneric = '00000000-0000-0000-0000-000000000000';
function cleanChangeFilePaths(root: string, changeFiles: string[]) {
root = root.replace(/\\/g, '/');
return changeFiles.map(changeFile =>
changeFile.replace(/\\/g, '/').replace(root, '').replace(uuidRegex, uuidGeneric).replace(/^\//, '')
);
}
describe('writeChangeFiles', () => {
let repositoryFactory: RepositoryFactory;
let monorepoFactory: RepositoryFactory;
initMockLogs();
beforeAll(() => {
// These tests can share the same repo factories because they don't push to origin
// (the actual tests run against a clone)
repositoryFactory = new RepositoryFactory('single');
monorepoFactory = new RepositoryFactory('monorepo');
});
afterAll(() => {
repositoryFactory.cleanUp();
monorepoFactory.cleanUp();
});
it('writes individual change files', () => {
const repo = monorepoFactory.cloneRepository();
const previousHead = repo.getCurrentHash();
writeChangeFiles({
changes: [{ packageName: 'foo' }, { packageName: 'bar' }] as ChangeFileInfo[],
cwd: repo.rootPath,
});
const expectedFiles = [`change/bar-${uuidGeneric}.json`, `change/foo-${uuidGeneric}.json`];
// change files are created
const changeFiles = getChangeFiles(repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, changeFiles)).toEqual(expectedFiles);
// and tracked
const trackedFiles = listAllTrackedFiles(['change/*'], repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, trackedFiles)).toEqual(expectedFiles);
// and committed
expect(repo.getCurrentHash()).not.toEqual(previousHead);
// also verify contents of one file
const changeFileContents = fs.readJSONSync(changeFiles[0]);
expect(changeFileContents).toEqual({ packageName: 'bar' });
});
it('respects commitChangeFiles=false', () => {
const repo = monorepoFactory.cloneRepository();
const previousHead = repo.getCurrentHash();
writeChangeFiles({
changes: [{ packageName: 'foo' }, { packageName: 'bar' }] as ChangeFileInfo[],
cwd: repo.rootPath,
commitChangeFiles: false,
});
const expectedFiles = [`change/bar-${uuidGeneric}.json`, `change/foo-${uuidGeneric}.json`];
// change files are created
const changeFiles = getChangeFiles(repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, changeFiles)).toEqual(expectedFiles);
// and tracked
const trackedFiles = listAllTrackedFiles(['change/*'], repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, trackedFiles)).toEqual(expectedFiles);
// but NOT committed
expect(repo.getCurrentHash()).toEqual(previousHead);
});
it('writes grouped change files', () => {
const repo = monorepoFactory.cloneRepository();
writeChangeFiles({
changes: [{ packageName: 'foo' }, { packageName: 'bar' }] as ChangeFileInfo[],
cwd: repo.rootPath,
groupChanges: true,
});
const expectedFile = [`change/change-${uuidGeneric}.json`];
const changeFiles = getChangeFiles(repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, changeFiles)).toEqual(expectedFile);
const trackedFiles = listAllTrackedFiles(['change/*'], repo.rootPath);
expect(cleanChangeFilePaths(repo.rootPath, trackedFiles)).toEqual(expectedFile);
const changeFileContents = fs.readJSONSync(changeFiles[0]);
expect(changeFileContents).toEqual({
changes: [{ packageName: 'foo' }, { packageName: 'bar' }],
});
});
});


@@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`changelog generation writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): bar CHANGELOG.json 1`] = `
exports[`writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): bar CHANGELOG.json 1`] = `
Object {
"entries": Array [
Object {
@@ -23,7 +23,7 @@ Object {
}
`;
exports[`changelog generation writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): bar CHANGELOG.md 1`] = `
exports[`writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): bar CHANGELOG.md 1`] = `
"# Change Log - bar
This log was last generated on (date) and should not be manually modified.
@@ -40,7 +40,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): foo CHANGELOG.json 1`] = `
exports[`writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): foo CHANGELOG.json 1`] = `
Object {
"entries": Array [
Object {
@@ -81,7 +81,7 @@ Object {
}
`;
exports[`changelog generation writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): foo CHANGELOG.md 1`] = `
exports[`writeChangelog generates correct changelog in monorepo with groupChanges (grouped change FILES): foo CHANGELOG.md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -101,7 +101,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct changelog: changelog json 1`] = `
exports[`writeChangelog generates correct changelog: changelog json 1`] = `
Object {
"entries": Array [
Object {
@@ -142,7 +142,7 @@ Object {
}
`;
exports[`changelog generation writeChangelog generates correct changelog: changelog md 1`] = `
exports[`writeChangelog generates correct changelog: changelog md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -162,7 +162,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog 1`] = `
exports[`writeChangelog generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog 1`] = `
"# Change Log - bar
This log was last generated on (date) and should not be manually modified.
@@ -179,7 +179,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog 2`] = `
exports[`writeChangelog generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog 2`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -199,7 +199,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct grouped changelog: bar CHANGELOG.md 1`] = `
exports[`writeChangelog generates correct grouped changelog: bar CHANGELOG.md 1`] = `
"# Change Log - bar
This log was last generated on (date) and should not be manually modified.
@@ -217,7 +217,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct grouped changelog: foo CHANGELOG.md 1`] = `
exports[`writeChangelog generates correct grouped changelog: foo CHANGELOG.md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -234,7 +234,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates correct grouped changelog: grouped CHANGELOG.md 1`] = `
exports[`writeChangelog generates correct grouped changelog: grouped CHANGELOG.md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -255,7 +255,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: bar CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: bar CHANGELOG.md 1`] = `
"# Change Log - bar
This log was last generated on (date) and should not be manually modified.
@@ -273,7 +273,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: baz CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: baz CHANGELOG.md 1`] = `
"# Change Log - baz
This log was last generated on (date) and should not be manually modified.
@@ -290,7 +290,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: grouped CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries where packages have normal changes and dependency changes: grouped CHANGELOG.md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.
@@ -310,7 +310,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries: bar CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries: bar CHANGELOG.md 1`] = `
"# Change Log - bar
This log was last generated on (date) and should not be manually modified.
@@ -327,7 +327,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries: baz CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries: baz CHANGELOG.md 1`] = `
"# Change Log - baz
This log was last generated on (date) and should not be manually modified.
@@ -344,7 +344,7 @@ This log was last generated on (date) and should not be manually modified.
"
`;
exports[`changelog generation writeChangelog generates grouped changelog without dependent change entries: grouped CHANGELOG.md 1`] = `
exports[`writeChangelog generates grouped changelog without dependent change entries: grouped CHANGELOG.md 1`] = `
"# Change Log - foo
This log was last generated on (date) and should not be manually modified.


@@ -0,0 +1,300 @@
import { generateChangeFiles } from '../../__fixtures__/changeFiles';
import { cleanChangelogJson, readChangelogJson, readChangelogMd } from '../../__fixtures__/changelog';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { writeChangelog } from '../../changelog/writeChangelog';
import { getPackageInfos } from '../../monorepo/getPackageInfos';
import { readChangeFiles } from '../../changefile/readChangeFiles';
import { BeachballOptions } from '../../types/BeachballOptions';
import { ChangeFileInfo, ChangeInfo } from '../../types/ChangeInfo';
function getChange(packageName: string, comment: string): ChangeFileInfo {
return {
comment,
email: 'test@testtestme.com',
packageName,
type: 'patch',
dependentChangeType: 'patch',
};
}
describe('writeChangelog', () => {
let repositoryFactory: RepositoryFactory;
let monoRepoFactory: RepositoryFactory;
initMockLogs();
beforeAll(() => {
// These tests can share the same repo factories because they don't push to origin
// (the actual tests run against a clone)
repositoryFactory = new RepositoryFactory('single');
monoRepoFactory = new RepositoryFactory('monorepo');
});
afterAll(() => {
repositoryFactory.cleanUp();
monoRepoFactory.cleanUp();
});
it('generates correct changelog', async () => {
const repository = repositoryFactory.cloneRepository();
repository.commitChange('foo');
generateChangeFiles([getChange('foo', 'additional comment 2')], repository.rootPath);
generateChangeFiles([getChange('foo', 'additional comment 1')], repository.rootPath);
generateChangeFiles([getChange('foo', 'comment 1')], repository.rootPath);
repository.commitChange('bar');
generateChangeFiles([getChange('foo', 'comment 2')], repository.rootPath);
const beachballOptions = { path: repository.rootPath } as BeachballOptions;
const packageInfos = getPackageInfos(repository.rootPath);
const changes = readChangeFiles(beachballOptions, packageInfos);
await writeChangelog(beachballOptions, changes, { foo: 'patch' }, { foo: new Set(['foo']) }, packageInfos);
expect(readChangelogMd(repository.rootPath)).toMatchSnapshot('changelog md');
const changelogJson = readChangelogJson(repository.rootPath);
expect(cleanChangelogJson(changelogJson)).toMatchSnapshot('changelog json');
// Every entry should have a different commit hash
const patchComments = changelogJson.entries[0].comments.patch!;
const commits = patchComments.map(entry => entry.commit);
expect(new Set(commits).size).toEqual(patchComments.length);
// The first entry should be the newest
expect(patchComments[0].commit).toBe(repository.getCurrentHash());
});
it('generates correct changelog in monorepo with groupChanges (grouped change FILES)', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
const params = [monoRepo.rootPath, true /*groupChanges*/] as const;
generateChangeFiles(
[getChange('foo', 'additional comment 2'), getChange('bar', 'comment from bar change ')],
...params
);
generateChangeFiles([getChange('foo', 'additional comment 1')], ...params);
generateChangeFiles([getChange('foo', 'comment 1')], ...params);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('foo', 'comment 2')], ...params);
const beachballOptions = { path: monoRepo.rootPath, groupChanges: true } as BeachballOptions;
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions, packageInfos);
await writeChangelog(beachballOptions, changes, { foo: 'patch', bar: 'patch' }, {}, packageInfos);
// check changelogs for both foo and bar
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot('foo CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
const fooJson = readChangelogJson(monoRepo.pathTo('packages/foo'));
expect(cleanChangelogJson(fooJson)).toMatchSnapshot('foo CHANGELOG.json');
expect(readChangelogJson(monoRepo.pathTo('packages/bar'), true /*clean*/)).toMatchSnapshot('bar CHANGELOG.json');
// Every entry should have a different commit hash
const patchComments = fooJson.entries[0].comments.patch!;
const commits = patchComments.map(entry => entry.commit);
expect(new Set(commits).size).toEqual(patchComments.length);
// The first entry should be the newest
expect(patchComments[0].commit).toBe(monoRepo.getCurrentHash());
});
it('generates correct grouped changelog', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
generateChangeFiles([getChange('bar', 'comment 3')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(beachballOptions as BeachballOptions, changes, {}, {}, packageInfos);
// Validate changelog for foo and bar packages
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot('foo CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
// Validate grouped changelog for foo and bar packages
expect(readChangelogMd(monoRepo.rootPath)).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates grouped changelog without dependent change entries', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('baz');
generateChangeFiles([getChange('baz', 'comment 1')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar', 'packages/baz'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(
beachballOptions as BeachballOptions,
changes,
{ bar: 'patch', baz: 'patch' },
{ bar: new Set(['baz']) },
packageInfos
);
// Validate changelog for bar package
const barChangelogText = readChangelogMd(monoRepo.pathTo('packages/bar'));
expect(barChangelogText).toContain('- Bump baz');
expect(barChangelogText).toMatchSnapshot('bar CHANGELOG.md');
// Validate changelog for baz package
expect(readChangelogMd(monoRepo.pathTo('packages/baz'))).toMatchSnapshot('baz CHANGELOG.md');
// Validate grouped changelog for foo master package
const groupedChangelogText = readChangelogMd(monoRepo.rootPath);
expect(groupedChangelogText).toContain('- comment 1');
expect(groupedChangelogText).not.toContain('- Bump baz');
expect(groupedChangelogText).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates grouped changelog without dependent change entries where packages have normal changes and dependency changes', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('baz');
generateChangeFiles([getChange('baz', 'comment 1')], monoRepo.rootPath);
generateChangeFiles([getChange('bar', 'comment 1')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.rootPath,
include: ['packages/foo', 'packages/bar', 'packages/baz'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(
beachballOptions as BeachballOptions,
changes,
{ bar: 'patch', baz: 'patch' },
{ bar: new Set(['baz']) },
packageInfos
);
// Validate changelog for bar and baz packages
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot('bar CHANGELOG.md');
expect(readChangelogMd(monoRepo.pathTo('packages/baz'))).toMatchSnapshot('baz CHANGELOG.md');
// Validate grouped changelog for foo master package
expect(readChangelogMd(monoRepo.rootPath)).toMatchSnapshot('grouped CHANGELOG.md');
});
it('generates correct grouped changelog when grouped change log is saved to the same dir as a regular changelog', async () => {
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.pathTo('packages/foo'),
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
await writeChangelog(beachballOptions as BeachballOptions, changes, {}, {}, packageInfos);
// Validate changelog for bar package
expect(readChangelogMd(monoRepo.pathTo('packages/bar'))).toMatchSnapshot();
// Validate grouped changelog for foo and bar packages
expect(readChangelogMd(monoRepo.pathTo('packages/foo'))).toMatchSnapshot();
});
it('Verify that the changeFile transform functions are run, if provided', async () => {
const editedComment: string = 'Edited comment for testing';
const monoRepo = monoRepoFactory.cloneRepository();
monoRepo.commitChange('foo');
generateChangeFiles([getChange('foo', 'comment 1')], monoRepo.rootPath);
monoRepo.commitChange('bar');
generateChangeFiles([getChange('bar', 'comment 2')], monoRepo.rootPath);
const beachballOptions: Partial<BeachballOptions> = {
path: monoRepo.rootPath,
transform: {
changeFiles: (changeFile, changeFilePath) => {
// For test, we will be changing the comment based on the package name
if ((changeFile as ChangeInfo).packageName === 'foo') {
(changeFile as ChangeInfo).comment = editedComment;
}
return changeFile as ChangeInfo;
},
},
changelog: {
groups: [
{
masterPackageName: 'foo',
changelogPath: monoRepo.pathTo('packages/foo'),
include: ['packages/foo', 'packages/bar'],
},
],
},
};
const packageInfos = getPackageInfos(monoRepo.rootPath);
const changes = readChangeFiles(beachballOptions as BeachballOptions, packageInfos);
// Verify that the comment of only the intended change file is changed
for (const { change, changeFile } of changes) {
if (changeFile.startsWith('foo')) {
expect(change.comment).toBe(editedComment);
} else {
expect(change.comment).toBe('comment 2');
}
}
});
});


@@ -1,11 +1,11 @@
import fs from 'fs-extra';
import { Repository } from '../__fixtures__/repository';
import { RepositoryFactory } from '../__fixtures__/repositoryFactory';
import { getOptions } from '../options/getOptions';
import { Repository } from '../../__fixtures__/repository';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { getOptions } from '../../options/getOptions';
const baseArgv = ['node.exe', 'bin.js'];
describe('config', () => {
describe('getOptions', () => {
let repositoryFactory: RepositoryFactory;
let repo: Repository;
@@ -24,10 +24,10 @@ describe('config', () => {
it('uses the branch name defined in beachball.config.js', () => {
const config = inDirectory(repo.rootPath, () => {
fs.writeFileSync('beachball.config.js', 'module.exports = { branch: "origin/main" };');
fs.writeFileSync('beachball.config.js', 'module.exports = { branch: "origin/foo" };');
return getOptions(baseArgv);
});
expect(config.branch).toEqual('origin/main');
expect(config.branch).toEqual('origin/foo');
});
it('--config overrides configuration path', () => {


@@ -1,11 +1,11 @@
import fs from 'fs-extra';
import path from 'path';
import { initMockLogs } from '../__fixtures__/mockLogs';
import { npmShow, NpmShowResult } from '../__fixtures__/npmShow';
import { Registry } from '../__fixtures__/registry';
import { tmpdir } from '../__fixtures__/tmpdir';
import { packagePublish } from '../packageManager/packagePublish';
import { PackageInfo } from '../types/PackageInfo';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { npmShow, NpmShowResult } from '../../__fixtures__/npmShow';
import { Registry } from '../../__fixtures__/registry';
import { tmpdir } from '../../__fixtures__/tmpdir';
import { packagePublish } from '../../packageManager/packagePublish';
import { PackageInfo } from '../../types/PackageInfo';
const testTag = 'testbeachballtag';
const testName = 'testbeachballpackage';


@@ -0,0 +1,54 @@
import fs from 'fs-extra';
import { generateChangeFiles } from '../../__fixtures__/changeFiles';
import { defaultRemoteBranchName } from '../../__fixtures__/gitDefaults';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { Repository } from '../../__fixtures__/repository';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { BeachballOptions } from '../../types/BeachballOptions';
import { areChangeFilesDeleted } from '../../validation/areChangeFilesDeleted';
import { getChangePath } from '../../paths';
describe('areChangeFilesDeleted', () => {
let repositoryFactory: RepositoryFactory;
let repository: Repository;
initMockLogs();
beforeAll(() => {
repositoryFactory = new RepositoryFactory('single');
});
beforeEach(() => {
repository = repositoryFactory.cloneRepository();
generateChangeFiles(['pkg-1'], repository.rootPath);
repository.push();
});
afterAll(() => {
repositoryFactory.cleanUp();
});
it('is false when no change files are deleted', () => {
repository.checkout('-b', 'feature-0');
const result = areChangeFilesDeleted({
branch: defaultRemoteBranchName,
path: repository.rootPath,
} as BeachballOptions);
expect(result).toBeFalsy();
});
it('is true when change files are deleted', () => {
repository.checkout('-b', 'feature-0');
const changeDirPath = getChangePath(repository.rootPath);
fs.removeSync(changeDirPath);
repository.commitAll();
const result = areChangeFilesDeleted({
branch: defaultRemoteBranchName,
path: repository.rootPath,
} as BeachballOptions);
expect(result).toBeTruthy();
});
});


@@ -0,0 +1,84 @@
import { defaultRemoteBranchName, defaultRemoteName } from '../../__fixtures__/gitDefaults';
import { initMockLogs } from '../../__fixtures__/mockLogs';
import { Repository } from '../../__fixtures__/repository';
import { RepositoryFactory } from '../../__fixtures__/repositoryFactory';
import { isChangeFileNeeded } from '../../validation/isChangeFileNeeded';
import { BeachballOptions } from '../../types/BeachballOptions';
import { getPackageInfos } from '../../monorepo/getPackageInfos';
describe('isChangeFileNeeded', () => {
let repositoryFactory: RepositoryFactory;
let repository: Repository;
initMockLogs();
beforeAll(() => {
repositoryFactory = new RepositoryFactory('single');
});
beforeEach(() => {
repository = repositoryFactory.cloneRepository();
});
afterAll(() => {
repositoryFactory.cleanUp();
});
it('is false when no changes have been made', () => {
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeFalsy();
});
it('is true when changes exist in a new branch', () => {
repository.checkout('-b', 'feature-0');
repository.commitChange('myFilename');
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeTruthy();
});
it('is false when changes are CHANGELOG files', () => {
repository.checkout('-b', 'feature-0');
repository.commitChange('CHANGELOG.md');
const result = isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repository.rootPath,
fetch: false,
} as BeachballOptions,
getPackageInfos(repository.rootPath)
);
expect(result).toBeFalsy();
});
it('throws if the remote is invalid', () => {
// make a separate clone due to messing with the remote
const repo = repositoryFactory.cloneRepository();
repo.git(['remote', 'set-url', defaultRemoteName, 'file:///__nonexistent']);
repo.checkout('-b', 'feature-0');
repo.commitChange('CHANGELOG.md');
expect(() => {
isChangeFileNeeded(
{
branch: defaultRemoteBranchName,
path: repo.rootPath,
fetch: true,
} as BeachballOptions,
getPackageInfos(repo.rootPath)
);
}).toThrow();
});
});