Mirror of https://github.com/microsoft/beachball.git
Add concurrency flag (#998)
* Add concurrency flag * Change files * Adding bunch of tests * Adding bunch of tests * Fixing tests * Unify dependency graph generation * Adding E2E tests * switch to using getPackageDependencies * Bump workspace-tools version --------- Co-authored-by: Nemanja Tesic <nemanjatesic@microsoft.com>
This commit is contained in:
Parent
2b5cea800f
Commit
d97e089805
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"type": "minor",
|
||||
"comment": "Add concurrency flag",
|
||||
"packageName": "beachball",
|
||||
"email": "nemanjatesic@microsoft.com",
|
||||
"dependentChangeType": "patch"
|
||||
}
|
|
@ -50,8 +50,9 @@
|
|||
"prompts": "^2.4.2",
|
||||
"semver": "^7.0.0",
|
||||
"toposort": "^2.0.2",
|
||||
"p-graph": "^1.1.2",
|
||||
"uuid": "^9.0.0",
|
||||
"workspace-tools": "^0.36.3",
|
||||
"workspace-tools": "^0.38.0",
|
||||
"yargs-parser": "^21.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
|
|
@ -7,11 +7,12 @@ import { defaultBranchName, defaultRemoteBranchName } from '../__fixtures__/gitD
|
|||
import { initMockLogs } from '../__fixtures__/mockLogs';
|
||||
import { npmShow } from '../__fixtures__/npmShow';
|
||||
import { Repository } from '../__fixtures__/repository';
|
||||
import { RepositoryFactory } from '../__fixtures__/repositoryFactory';
|
||||
import { PackageJsonFixture, RepositoryFactory } from '../__fixtures__/repositoryFactory';
|
||||
import { publish } from '../commands/publish';
|
||||
import { getDefaultOptions } from '../options/getDefaultOptions';
|
||||
import { BeachballOptions } from '../types/BeachballOptions';
|
||||
import { initNpmMock } from '../__fixtures__/mockNpm';
|
||||
import { _mockNpmPublish, initNpmMock } from '../__fixtures__/mockNpm';
|
||||
import os from 'os';
|
||||
|
||||
// Spawning actual npm to run commands against a fake registry is extremely slow, so mock it for
|
||||
// this test (packagePublish covers the more complete npm registry scenario).
|
||||
|
@ -21,7 +22,8 @@ import { initNpmMock } from '../__fixtures__/mockNpm';
|
|||
jest.mock('../packageManager/npm');
|
||||
|
||||
describe('publish command (e2e)', () => {
|
||||
initNpmMock();
|
||||
const concurrencyValues = [[1],[os.cpus().length]];
|
||||
const npmMock = initNpmMock();
|
||||
|
||||
let repositoryFactory: RepositoryFactory | undefined;
|
||||
let repo: Repository | undefined;
|
||||
|
@ -55,11 +57,11 @@ describe('publish command (e2e)', () => {
|
|||
repo = undefined;
|
||||
});
|
||||
|
||||
it('can perform a successful npm publish', async () => {
|
||||
it.each(concurrencyValues)('can perform a successful npm publish, concurrency: %s', async (concurrency: number) => {
|
||||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions();
|
||||
const options = getOptions({ concurrency: concurrency });
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -81,7 +83,9 @@ describe('publish command (e2e)', () => {
|
|||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ push: false });
|
||||
const options = getOptions({
|
||||
push: false,
|
||||
});
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -97,11 +101,11 @@ describe('publish command (e2e)', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('can perform a successful npm publish from a race condition', async () => {
|
||||
it.each(concurrencyValues)('can perform a successful npm publish from a race condition, concurrency: %s', async (concurrency: number) => {
|
||||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions();
|
||||
const options = getOptions({ concurrency: concurrency });
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -138,11 +142,11 @@ describe('publish command (e2e)', () => {
|
|||
expect(fetchCount).toBe(2);
|
||||
});
|
||||
|
||||
it('can perform a successful npm publish from a race condition in the dependencies', async () => {
|
||||
it.each(concurrencyValues)('can perform a successful npm publish from a race condition in the dependencies, concurrency: %s', async (concurrency: number) => {
|
||||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions();
|
||||
const options = getOptions({ concurrency: concurrency });
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -281,7 +285,9 @@ describe('publish command (e2e)', () => {
|
|||
});
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ new: true });
|
||||
const options = getOptions({
|
||||
new: true,
|
||||
});
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -296,11 +302,14 @@ describe('publish command (e2e)', () => {
|
|||
expect(repo.getCurrentTags()).toEqual(['bar_v1.3.4', 'foo_v1.1.0']);
|
||||
});
|
||||
|
||||
it('should not perform npm publish on out-of-scope package', async () => {
|
||||
it.each(concurrencyValues)('should not perform npm publish on out-of-scope package, concurrency: %s', async (concurrency: number) => {
|
||||
repositoryFactory = new RepositoryFactory('monorepo');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ scope: ['!packages/foo'] });
|
||||
const options = getOptions({
|
||||
scope: ['!packages/foo'],
|
||||
concurrency: concurrency,
|
||||
});
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
generateChangeFiles(['bar'], options);
|
||||
|
@ -393,7 +402,9 @@ describe('publish command (e2e)', () => {
|
|||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ fetch: false });
|
||||
const options = getOptions({
|
||||
fetch: false,
|
||||
});
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
repo.push();
|
||||
|
@ -422,7 +433,9 @@ describe('publish command (e2e)', () => {
|
|||
repositoryFactory = new RepositoryFactory('single');
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ depth: 10 });
|
||||
const options = getOptions({
|
||||
depth: 10,
|
||||
});
|
||||
|
||||
generateChangeFiles(['foo'], options);
|
||||
|
||||
|
@ -474,4 +487,175 @@ describe('publish command (e2e)', () => {
|
|||
const manifestJson = fs.readFileSync(repo.pathTo('foo.txt'));
|
||||
expect(manifestJson.toString()).toMatchInlineSnapshot(`"foo"`);
|
||||
});
|
||||
|
||||
it('publishes multiple packages concurrently respecting the concurrency limit', async () => {
|
||||
const packagesToPublish = ['pkg1', 'pkg2', 'pkg3', 'pkg4', 'pkg5', 'pkg6', 'pkg7', 'pkg8', 'pkg9'];
|
||||
const packages: { [packageName: string]: PackageJsonFixture } = {};
|
||||
for (const name of packagesToPublish) {
|
||||
packages[name] = { version: '1.0.0' };
|
||||
}
|
||||
|
||||
repositoryFactory = new RepositoryFactory({
|
||||
folders: {
|
||||
packages: packages,
|
||||
},
|
||||
});
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const concurrency = 2;
|
||||
const options = getOptions({ concurrency: concurrency });
|
||||
|
||||
generateChangeFiles(packagesToPublish, options);
|
||||
repo.push();
|
||||
|
||||
const simulateWait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
let currentConcurrency = 0;
|
||||
let maxConcurrency = 0;
|
||||
npmMock.setCommandOverride('publish', async (registryData, args, opts) => {
|
||||
currentConcurrency++;
|
||||
await simulateWait(100);
|
||||
const result = await _mockNpmPublish(registryData, args, opts);
|
||||
maxConcurrency = Math.max(maxConcurrency, currentConcurrency);
|
||||
currentConcurrency--;
|
||||
return result;
|
||||
});
|
||||
|
||||
await publish(options);
|
||||
// Verify that at most `concurrency` number of packages were published concurrently
|
||||
expect(maxConcurrency).toBe(concurrency);
|
||||
|
||||
// Verify all packages were published
|
||||
for (const pkg of packagesToPublish) {
|
||||
expect(await npmShow(pkg)).toMatchObject({
|
||||
name: pkg,
|
||||
versions: ['1.1.0'],
|
||||
'dist-tags': { latest: '1.1.0' },
|
||||
});
|
||||
}
|
||||
|
||||
repo.checkout(defaultBranchName);
|
||||
repo.pull();
|
||||
const expectedTags = packagesToPublish.map(pkg => `${pkg}_v1.1.0`);
|
||||
// Verify all tags were updated
|
||||
expect(repo.getCurrentTags().sort()).toEqual(expectedTags.sort());
|
||||
});
|
||||
|
||||
it('handles errors correctly when one of the packages fails during concurrent publishing', async () => {
|
||||
const packageNames = ['pkg1', 'pkg2', 'pkg3', 'pkg4', 'pkg5', 'pkg6', 'pkg7', 'pkg8'];
|
||||
const packages: { [packageName: string]: PackageJsonFixture } = {};
|
||||
const packageToFail = 'pkg4';
|
||||
for (const name of packageNames) {
|
||||
packages[name] = { version: '1.0.0' };
|
||||
}
|
||||
packages['pkg8'].dependencies = { [packageToFail]: '1.0.0' };
|
||||
packages['pkg7'].dependencies = { [packageToFail]: '1.0.0' };
|
||||
|
||||
repositoryFactory = new RepositoryFactory({
|
||||
folders: {
|
||||
packages: packages,
|
||||
},
|
||||
});
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const options = getOptions({ concurrency: 3 });
|
||||
|
||||
generateChangeFiles(packageNames, options);
|
||||
repo.push();
|
||||
|
||||
npmMock.setCommandOverride('publish', async (registryData, args, opts) => {
|
||||
if (opts.cwd?.endsWith(packageToFail)) {
|
||||
return {
|
||||
failed: true,
|
||||
stderr: 'Failed to publish package',
|
||||
stdout: '',
|
||||
success: false,
|
||||
all: 'Failed to publish package',
|
||||
}
|
||||
}
|
||||
return _mockNpmPublish(registryData, args, opts);
|
||||
});
|
||||
|
||||
await expect(publish(options)).rejects.toThrow('Error publishing! Refer to the previous logs for recovery instructions.');
|
||||
|
||||
for (const name of packageNames) {
|
||||
if (['pkg7', 'pkg8', packageToFail].includes(name)) {
|
||||
// Verify that the packages that failed to publish are not published
|
||||
// pkg7 and pkg8 are not published because they depend on pkg4 and pkg4 failed to publish
|
||||
await npmShow(name, { shouldFail: true });
|
||||
} else {
|
||||
// Verify that the packages that did not fail to publish are published
|
||||
expect(await npmShow(name)).toMatchObject({
|
||||
name: name,
|
||||
versions: ['1.1.0'],
|
||||
'dist-tags': { latest: '1.1.0' },
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
it('should respect postpublish hook respecting the concurrency limit when publishing multiple packages concurrently', async () => {
|
||||
const packagesToPublish = ['pkg1', 'pkg2', 'pkg3', 'pkg4', 'pkg5', 'pkg6', 'pkg7', 'pkg8', 'pkg9'];
|
||||
const packages: { [packageName: string]: PackageJsonFixture } = {};
|
||||
for (const name of packagesToPublish) {
|
||||
packages[name] = {
|
||||
version: '1.0.0',
|
||||
afterPublish: {
|
||||
notify: `message-${name}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
repositoryFactory = new RepositoryFactory({
|
||||
folders: {
|
||||
packages: packages,
|
||||
},
|
||||
});
|
||||
repo = repositoryFactory.cloneRepository();
|
||||
|
||||
const simulateWait = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
const afterPublishStrings: { packageName: string; notify: string }[] = [];
|
||||
const concurrency = 2;
|
||||
let currentConcurrency = 0;
|
||||
let maxConcurrency = 0;
|
||||
const options = getOptions({
|
||||
hooks: {
|
||||
postpublish: async (packagePath) => {
|
||||
currentConcurrency++;
|
||||
await simulateWait(100);
|
||||
const packageName = path.basename(packagePath);
|
||||
const packageJsonPath = path.join(packagePath, 'package.json');
|
||||
const packageJson = fs.readJSONSync(packageJsonPath);
|
||||
if (packageJson.afterPublish) {
|
||||
afterPublishStrings.push({
|
||||
packageName,
|
||||
notify: packageJson.afterPublish.notify,
|
||||
});
|
||||
}
|
||||
maxConcurrency = Math.max(maxConcurrency, currentConcurrency);
|
||||
currentConcurrency--;
|
||||
},
|
||||
},
|
||||
concurrency: concurrency,
|
||||
});
|
||||
|
||||
generateChangeFiles(packagesToPublish, options);
|
||||
repo.push();
|
||||
|
||||
await publish(options);
|
||||
// Verify that at most `concurrency` number of postpublish hooks were running concurrently
|
||||
expect(maxConcurrency).toBe(concurrency);
|
||||
|
||||
for (const pkg of packagesToPublish) {
|
||||
const packageJson = fs.readJSONSync(repo.pathTo(`packages/${pkg}/package.json`));
|
||||
if (packageJson.afterPublish) {
|
||||
// Verify that all postpublish hooks were called
|
||||
expect(afterPublishStrings).toContainEqual({
|
||||
packageName: pkg,
|
||||
notify: packageJson.afterPublish.notify,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
@ -120,50 +120,50 @@ describe('_mockNpmShow', () => {
|
|||
},
|
||||
});
|
||||
|
||||
it("errors if package doesn't exist", () => {
|
||||
it("errors if package doesn't exist", async () => {
|
||||
const emptyData = _makeRegistryData({});
|
||||
const result = _mockNpmShow(emptyData, ['foo'], { cwd: undefined });
|
||||
const result = await _mockNpmShow(emptyData, ['foo'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ error: '[fake] code E404 - foo - not found' }));
|
||||
});
|
||||
|
||||
it('returns requested version plus dist-tags and version list', () => {
|
||||
const result = _mockNpmShow(data, ['foo@1.0.0'], { cwd: undefined });
|
||||
it('returns requested version plus dist-tags and version list', async () => {
|
||||
const result = await _mockNpmShow(data, ['foo@1.0.0'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data: data, name: 'foo', version: '1.0.0' }));
|
||||
});
|
||||
|
||||
it('returns requested version of scoped package', () => {
|
||||
const result = _mockNpmShow(data, ['@foo/bar@2.0.0'], { cwd: undefined });
|
||||
it('returns requested version of scoped package', async () => {
|
||||
const result = await _mockNpmShow(data, ['@foo/bar@2.0.0'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data, name: '@foo/bar', version: '2.0.0' }));
|
||||
});
|
||||
|
||||
it('returns requested tag', () => {
|
||||
const result = _mockNpmShow(data, ['foo@beta'], { cwd: undefined });
|
||||
it('returns requested tag', async () => {
|
||||
const result = await _mockNpmShow(data, ['foo@beta'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data, name: 'foo', version: '1.0.0-beta' }));
|
||||
});
|
||||
|
||||
it('returns requested tag of scoped package', () => {
|
||||
const result = _mockNpmShow(data, ['@foo/bar@beta'], { cwd: undefined });
|
||||
it('returns requested tag of scoped package', async () => {
|
||||
const result = await _mockNpmShow(data, ['@foo/bar@beta'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data, name: '@foo/bar', version: '2.0.0-beta' }));
|
||||
});
|
||||
|
||||
it('returns latest version if no version requested', () => {
|
||||
const result = _mockNpmShow(data, ['foo'], { cwd: undefined });
|
||||
it('returns latest version if no version requested', async () => {
|
||||
const result = await _mockNpmShow(data, ['foo'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data, name: 'foo', version: '1.0.1' }));
|
||||
});
|
||||
|
||||
it('returns latest version of scoped package if no version requested', () => {
|
||||
const result = _mockNpmShow(data, ['@foo/bar'], { cwd: undefined });
|
||||
it('returns latest version of scoped package if no version requested', async () => {
|
||||
const result = await _mockNpmShow(data, ['@foo/bar'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ data, name: '@foo/bar', version: '2.0.1' }));
|
||||
});
|
||||
|
||||
it("errors if requested version doesn't exist", () => {
|
||||
const result = _mockNpmShow(data, ['foo@2.0.0'], { cwd: undefined });
|
||||
it("errors if requested version doesn't exist", async () => {
|
||||
const result = await _mockNpmShow(data, ['foo@2.0.0'], { cwd: undefined });
|
||||
expect(result).toEqual(getShowResult({ error: '[fake] code E404 - foo@2.0.0 - not found' }));
|
||||
});
|
||||
|
||||
// support for this could be added later
|
||||
it('currently throws if requested version is a range', () => {
|
||||
expect(() => _mockNpmShow(data, ['foo@^1.0.0'], { cwd: undefined })).toThrow(/not currently supported/);
|
||||
it('currently throws if requested version is a range', async () => {
|
||||
await expect(() => _mockNpmShow(data, ['foo@^1.0.0'], { cwd: undefined })).rejects.toThrow(/not currently supported/);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -198,19 +198,19 @@ describe('_mockNpmPublish', () => {
|
|||
jest.restoreAllMocks();
|
||||
});
|
||||
|
||||
it('throws if cwd is not specified', () => {
|
||||
expect(() => _mockNpmPublish({}, [], { cwd: undefined })).toThrow('cwd is required for mock npm publish');
|
||||
it('throws if cwd is not specified', async () => {
|
||||
await expect(() => _mockNpmPublish({}, [], { cwd: undefined })).rejects.toThrow('cwd is required for mock npm publish');
|
||||
});
|
||||
|
||||
it('errors if reading package.json fails', () => {
|
||||
it('errors if reading package.json fails', async () => {
|
||||
// this error is from the fs.readJsonSync mock, but it's the same code path as if reading the file fails
|
||||
expect(() => _mockNpmPublish({}, [], { cwd: 'fake' })).toThrow('packageJson not set');
|
||||
await expect(() => _mockNpmPublish({}, [], { cwd: 'fake' })).rejects.toThrow('packageJson not set');
|
||||
});
|
||||
|
||||
it('errors on re-publish', () => {
|
||||
it('errors on re-publish', async () => {
|
||||
const data = _makeRegistryData({ foo: { versions: ['1.0.0'] } });
|
||||
packageJson = { name: 'foo', version: '1.0.0', main: 'nope.js' };
|
||||
const result = _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
const result = await _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
expect(result).toEqual(
|
||||
getPublishResult({
|
||||
error: '[fake] EPUBLISHCONFLICT foo@1.0.0 already exists in registry',
|
||||
|
@ -221,11 +221,11 @@ describe('_mockNpmPublish', () => {
|
|||
expect(data.foo.versionData['1.0.0'].main).toBeUndefined();
|
||||
});
|
||||
|
||||
it('publishes to empty registry with default tag latest', () => {
|
||||
it('publishes to empty registry with default tag latest', async () => {
|
||||
const data = _makeRegistryData({});
|
||||
packageJson = { name: 'foo', version: '1.0.0', main: 'index.js' };
|
||||
|
||||
const result = _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
const result = await _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
expect(result).toEqual(getPublishResult({ tag: 'latest' }));
|
||||
expect(data.foo).toEqual({
|
||||
versions: ['1.0.0'],
|
||||
|
@ -234,13 +234,13 @@ describe('_mockNpmPublish', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('publishes package and updates latest tag', () => {
|
||||
it('publishes package and updates latest tag', async () => {
|
||||
const data = _makeRegistryData({
|
||||
foo: { versions: ['1.0.0'], 'dist-tags': { latest: '1.0.0' } },
|
||||
});
|
||||
packageJson = { name: 'foo', version: '2.0.0', main: 'index.js' };
|
||||
|
||||
const result = _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
const result = await _mockNpmPublish(data, [], { cwd: 'fake' });
|
||||
expect(result).toEqual(getPublishResult({ tag: 'latest' }));
|
||||
expect(data.foo).toEqual({
|
||||
versions: ['1.0.0', '2.0.0'],
|
||||
|
@ -253,13 +253,13 @@ describe('_mockNpmPublish', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('publishes requested tag and does not update latest', () => {
|
||||
it('publishes requested tag and does not update latest', async () => {
|
||||
const data = _makeRegistryData({
|
||||
foo: { versions: ['1.0.0'], 'dist-tags': { latest: '1.0.0', beta: '1.0.0' } },
|
||||
});
|
||||
packageJson = { name: 'foo', version: '2.0.0', main: 'index.js' };
|
||||
|
||||
const result = _mockNpmPublish(data, ['--tag', 'beta'], { cwd: 'fake' });
|
||||
const result = await _mockNpmPublish(data, ['--tag', 'beta'], { cwd: 'fake' });
|
||||
expect(result).toEqual(getPublishResult({ tag: 'beta' }));
|
||||
expect(data.foo).toEqual({
|
||||
versions: ['1.0.0', '2.0.0'],
|
||||
|
|
|
@ -31,7 +31,7 @@ type MockNpmCommand = (
|
|||
registryData: MockNpmRegistry,
|
||||
args: string[],
|
||||
opts: Parameters<typeof npm>[1]
|
||||
) => Pick<NpmResult, 'stdout' | 'stderr' | 'all' | 'success' | 'failed'>;
|
||||
) => Promise<Pick<NpmResult, 'stdout' | 'stderr' | 'all' | 'success' | 'failed'>>;
|
||||
|
||||
export type NpmMock = {
|
||||
/**
|
||||
|
@ -88,7 +88,7 @@ export function initNpmMock(): NpmMock {
|
|||
if (!func) {
|
||||
throw new Error(`Command not supported by mock npm: ${command}`);
|
||||
}
|
||||
return func(registryData, args, opts) as NpmResult;
|
||||
return await func(registryData, args, opts) as NpmResult;
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -142,7 +142,7 @@ export function _makeRegistryData(data: PartialRegistryData): MockNpmRegistry {
|
|||
}
|
||||
|
||||
/** (exported for testing) Mock npm show based on the registry data */
|
||||
export const _mockNpmShow: MockNpmCommand = (registryData, args) => {
|
||||
export const _mockNpmShow: MockNpmCommand = async (registryData, args) => {
|
||||
// Assumption: all beachball callers to "npm show" list the package name last
|
||||
const packageSpec = args.slice(-1)[0];
|
||||
|
||||
|
@ -187,7 +187,7 @@ export const _mockNpmShow: MockNpmCommand = (registryData, args) => {
|
|||
};
|
||||
|
||||
/** (exported for testing) Mock npm publish to the registry data */
|
||||
export const _mockNpmPublish: MockNpmCommand = (registryData, args: string[], opts: Parameters<typeof npm>[1]) => {
|
||||
export const _mockNpmPublish: MockNpmCommand = async (registryData, args: string[], opts: Parameters<typeof npm>[1]) => {
|
||||
if (!opts?.cwd) {
|
||||
throw new Error('cwd is required for mock npm publish');
|
||||
}
|
||||
|
|
|
@ -0,0 +1,154 @@
|
|||
import { describe, expect, it } from '@jest/globals';
|
||||
import { toposortPackages } from '../../publish/toposortPackages';
|
||||
import { PackageInfo, PackageInfos } from '../../types/PackageInfo';
|
||||
import { makePackageInfos } from '../../__fixtures__/packageInfos';
|
||||
import { getPackageGraph } from '../../monorepo/getPackageGraph';
|
||||
|
||||
describe('getPackageGraph', () => {
|
||||
/**
|
||||
* @returns all package names in the package graph
|
||||
*/
|
||||
async function getPackageGraphPackageNames(affectedPackages: Iterable<string>, packageInfos: PackageInfos, runHook?: (packageInfo: PackageInfo) => Promise<void>): Promise<string[]> {
|
||||
const visitedPackages: string[] = [];
|
||||
const packageGraph = getPackageGraph(affectedPackages, packageInfos, async (packageInfo: PackageInfo) => {
|
||||
visitedPackages.push(packageInfo.name);
|
||||
if (runHook) {
|
||||
await runHook(packageInfo);
|
||||
}
|
||||
});
|
||||
await packageGraph.run({
|
||||
concurrency: 1,
|
||||
continue: false,
|
||||
});
|
||||
|
||||
return visitedPackages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure that both `toposortPackages` and `getPackageGraph` are running the same logic for sorting packages.
|
||||
*/
|
||||
async function validateToposortPackagesAndPackageGraph(inputPackages: string[], packageInfos: PackageInfos, possibleSolutions: string[][]): Promise<void> {
|
||||
const toposortPackagesOutput = (toposortPackages(inputPackages, packageInfos))
|
||||
const getPackageGraphPackageNamesOutput = await getPackageGraphPackageNames(inputPackages, packageInfos);
|
||||
|
||||
expect(possibleSolutions).toContainEqual(toposortPackagesOutput);
|
||||
expect(possibleSolutions).toContainEqual(getPackageGraphPackageNamesOutput);
|
||||
}
|
||||
|
||||
it('sort packages which none of them has dependency', async () => {
|
||||
const packageInfos: PackageInfos = makePackageInfos({ foo: {}, bar: {} });
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'bar'], packageInfos, [
|
||||
['foo', 'bar'],
|
||||
['bar', 'foo'],
|
||||
]);
|
||||
});
|
||||
|
||||
it('sort packages with dependencies', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: {
|
||||
dependencies: { foo3: '1.0.0', bar2: '1.0.0' },
|
||||
},
|
||||
foo3: { dependencies: { foo2: '1.0.0' } },
|
||||
foo2: {},
|
||||
});
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'foo2', 'foo3'], packageInfos, [
|
||||
['foo2', 'foo3', 'foo']
|
||||
]);
|
||||
});
|
||||
|
||||
it('sort packages with different kinds of dependencies', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { foo3: '1.0.0' }, peerDependencies: { foo4: '1.0.0', bar: '1.0.0' } },
|
||||
foo2: { dependencies: {} },
|
||||
foo3: { dependencies: { foo2: '1.0.0' } },
|
||||
foo4: { devDependencies: { foo2: '1.0.0' } },
|
||||
});
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'foo2', 'foo3', 'foo4'], packageInfos, [
|
||||
['foo2', 'foo3', 'foo4', 'foo'],
|
||||
['foo2', 'foo4', 'foo3', 'foo'],
|
||||
]);
|
||||
});
|
||||
|
||||
it('sort packages with all different kinds of dependencies', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { foo3: '1.0.0' }, peerDependencies: { foo4: '1.0.0', bar: '1.0.0' } },
|
||||
foo2: { dependencies: {} },
|
||||
foo3: { optionalDependencies: { foo2: '1.0.0' } },
|
||||
foo4: { devDependencies: { foo2: '1.0.0' } },
|
||||
});
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'foo2', 'foo3', 'foo4'], packageInfos, [
|
||||
['foo2', 'foo3', 'foo4', 'foo']
|
||||
]);
|
||||
});
|
||||
|
||||
it('do not sort packages if it is not included', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { foo3: '1.0.0', bar: '1.0.0' } },
|
||||
foo2: {},
|
||||
foo3: { dependencies: { foo2: '1.0.0' } },
|
||||
});
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'foo3'], packageInfos, [
|
||||
['foo3', 'foo']
|
||||
]);
|
||||
});
|
||||
|
||||
it('do not sort packages if it is not included harder scenario', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { foo3: '1.0.0', bar: '1.0.0' } },
|
||||
foo2: { dependencies: { foo4: '1.0.0' } },
|
||||
foo3: { dependencies: { foo2: '1.0.0' } },
|
||||
foo4: { dependencies: {}},
|
||||
bar: { dependencies: { foo: '1.0.0' } },
|
||||
});
|
||||
|
||||
await validateToposortPackagesAndPackageGraph(['foo', 'foo3'], packageInfos, [
|
||||
['foo3', 'foo']
|
||||
]);
|
||||
});
|
||||
|
||||
it('throws if contains circular dependencies inside affected packages', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { bar: '1.0.0', } },
|
||||
bar: { dependencies: { foo: '1.0.0' } },
|
||||
});
|
||||
|
||||
await expect(async () => {
|
||||
await getPackageGraphPackageNames(['foo', 'bar'], packageInfos);
|
||||
}).rejects.toThrow(/We could not find a node in the graph with no dependencies, this likely means there is a cycle including all nodes/);
|
||||
});
|
||||
|
||||
it('throws if contains circular dependencies', async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { bar: '1.0.0', bar2: '1.0.0' } },
|
||||
bar: { dependencies: { foo: '1.0.0' } },
|
||||
});
|
||||
|
||||
await expect(async () => {
|
||||
await getPackageGraphPackageNames(['foo', 'bar'], packageInfos);
|
||||
}).rejects.toThrow(/We could not find a node in the graph with no dependencies, this likely means there is a cycle including all nodes/);
|
||||
});
|
||||
|
||||
it(`doesn't throws if graph contains circular dependencies outside affected packages`, async () => {
|
||||
const packageInfos = makePackageInfos({
|
||||
foo: { dependencies: { } },
|
||||
bar: { dependencies: { } },
|
||||
bar2: { dependencies: { bar3: '1.0.0' } },
|
||||
bar3: { dependencies: { bar2: '1.0.0', bar: '1.0.0' } },
|
||||
});
|
||||
|
||||
await getPackageGraphPackageNames(['foo', 'bar'], packageInfos)
|
||||
});
|
||||
|
||||
it('throws if package info is missing', async () => {
|
||||
const packageInfos = {} as any as PackageInfos;
|
||||
|
||||
await expect(async () => {
|
||||
await getPackageGraphPackageNames(['foo', 'bar'], packageInfos)
|
||||
}).rejects.toThrow(`Package info is missing for foo.`);
|
||||
});
|
||||
});
|
|
@ -1,6 +1,7 @@
|
|||
import path from 'path';
|
||||
import { HooksOptions } from '../types/BeachballOptions';
|
||||
import { PackageInfos } from '../types/PackageInfo';
|
||||
import { PackageInfo, PackageInfos } from '../types/PackageInfo';
|
||||
import { getPackageGraph } from '../monorepo/getPackageGraph';
|
||||
|
||||
/**
|
||||
* Call a hook for each affected package. Does nothing if the hook is undefined.
|
||||
|
@ -8,16 +9,28 @@ import { PackageInfos } from '../types/PackageInfo';
|
|||
export async function callHook(
|
||||
hook: HooksOptions['prebump' | 'postbump' | 'prepublish' | 'postpublish'],
|
||||
affectedPackages: Iterable<string>,
|
||||
packageInfos: PackageInfos
|
||||
packageInfos: PackageInfos,
|
||||
concurrency: number
|
||||
): Promise<void> {
|
||||
if (!hook) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const pkg of affectedPackages) {
|
||||
const packageInfo = packageInfos[pkg];
|
||||
const callHookInternal = async (packageInfo: PackageInfo) => {
|
||||
const packagePath = path.dirname(packageInfo.packageJsonPath);
|
||||
|
||||
await hook(packagePath, packageInfo.name, packageInfo.version, packageInfos);
|
||||
};
|
||||
|
||||
if (concurrency === 1) {
|
||||
for (const pkg of affectedPackages) {
|
||||
await callHookInternal(packageInfos[pkg]);
|
||||
}
|
||||
} else {
|
||||
const packageGraph = getPackageGraph(affectedPackages, packageInfos, callHookInternal);
|
||||
|
||||
await packageGraph.run({
|
||||
concurrency: concurrency,
|
||||
continue: false
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ import { updateLockFile } from './updateLockFile';
|
|||
export async function performBump(bumpInfo: BumpInfo, options: BeachballOptions): Promise<BumpInfo> {
|
||||
const { modifiedPackages, packageInfos, changeFileChangeInfos } = bumpInfo;
|
||||
|
||||
await callHook(options.hooks?.prebump, modifiedPackages, packageInfos);
|
||||
await callHook(options.hooks?.prebump, modifiedPackages, packageInfos, options.concurrency);
|
||||
|
||||
updatePackageJsons(modifiedPackages, packageInfos);
|
||||
await updateLockFile(options.path);
|
||||
|
@ -28,7 +28,7 @@ export async function performBump(bumpInfo: BumpInfo, options: BeachballOptions)
|
|||
unlinkChangeFiles(changeFileChangeInfos, packageInfos, options);
|
||||
}
|
||||
|
||||
await callHook(options.hooks?.postbump, modifiedPackages, packageInfos);
|
||||
await callHook(options.hooks?.postbump, modifiedPackages, packageInfos, options.concurrency);
|
||||
|
||||
// This is returned from bump() for testing
|
||||
return bumpInfo;
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
import { getPackageDependencies } from 'workspace-tools/lib/graph/getPackageDependencies';
|
||||
import { PackageInfos } from '../types/PackageInfo';
|
||||
|
||||
/**
|
||||
* @returns Each element is a tuple of [dependency, dependent] where `dependent` depends on `dependency`.
|
||||
* These are the edges of the dependency graph.
|
||||
*/
|
||||
export function getPackageDependencyGraph(
|
||||
packages: string[],
|
||||
packageInfos: PackageInfos
|
||||
): [string | undefined, string][] {
|
||||
const packageSet = new Set(packages);
|
||||
const dependencyGraph: [string | undefined, string][] = [];
|
||||
|
||||
for (const pkgName of packageSet) {
|
||||
const info = packageInfos[pkgName];
|
||||
if (!info) {
|
||||
throw new Error(`Package info is missing for ${pkgName}.`);
|
||||
}
|
||||
|
||||
const allDeps = getPackageDependencies(info, packageSet, {
|
||||
withDevDependencies: true,
|
||||
withPeerDependencies: true,
|
||||
withOptionalDependencies: true,
|
||||
});
|
||||
if (allDeps.length > 0) {
|
||||
for (const depPkgName of allDeps) {
|
||||
dependencyGraph.push([depPkgName, pkgName]);
|
||||
}
|
||||
} else {
|
||||
dependencyGraph.push([undefined, pkgName]);
|
||||
}
|
||||
}
|
||||
|
||||
return dependencyGraph;
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
import { PackageInfo, PackageInfos } from '../types/PackageInfo';
|
||||
import pGraph, { PGraphNodeMap } from 'p-graph';
|
||||
import { getPackageDependencyGraph } from './getPackageDependencyGraph';
|
||||
|
||||
export function getPackageGraph(
|
||||
affectedPackages: Iterable<string>,
|
||||
packageInfos: PackageInfos,
|
||||
runHook: (packageInfo: PackageInfo) => Promise<void>
|
||||
) {
|
||||
const nodeMap: PGraphNodeMap = new Map();
|
||||
for (const packageToBump of affectedPackages) {
|
||||
nodeMap.set(packageToBump, {
|
||||
run: async () => await runHook(packageInfos[packageToBump]),
|
||||
});
|
||||
}
|
||||
|
||||
const dependencyGraph: [string | undefined, string][] = getPackageDependencyGraph(Array.from(affectedPackages), packageInfos);
|
||||
const filteredDependencyGraph = filterDependencyGraph(dependencyGraph);
|
||||
return pGraph(nodeMap, filteredDependencyGraph);
|
||||
}
|
||||
|
||||
function filterDependencyGraph(dependencyGraph: [string | undefined, string][]): [string, string][] {
|
||||
return dependencyGraph.filter(([dep, _]) => dep !== undefined) as [string, string][];
|
||||
}
|
|
@ -23,7 +23,7 @@ const booleanOptions = [
|
|||
'version',
|
||||
'yes',
|
||||
] as const;
|
||||
const numberOptions = ['depth', 'gitTimeout', 'retries', 'timeout'] as const;
|
||||
const numberOptions = ['concurrency', 'depth', 'gitTimeout', 'retries', 'timeout'] as const;
|
||||
const stringOptions = [
|
||||
'access',
|
||||
'authType',
|
||||
|
|
|
@ -17,6 +17,7 @@ export function getDefaultOptions(): BeachballOptions {
|
|||
changeDir: 'change',
|
||||
command: 'change',
|
||||
commit: true,
|
||||
concurrency: 1,
|
||||
defaultNpmTag: 'latest',
|
||||
depth: undefined,
|
||||
disallowedChangeTypes: null,
|
||||
|
|
|
@ -40,7 +40,7 @@ export async function packagePublish(
|
|||
});
|
||||
|
||||
if (result.success) {
|
||||
console.log('Published!');
|
||||
console.log(`Published! - ${packageSpec}`);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
|
@ -9,6 +9,8 @@ import { validatePackageDependencies } from './validatePackageDependencies';
|
|||
import { performPublishOverrides } from './performPublishOverrides';
|
||||
import { getPackagesToPublish } from './getPackagesToPublish';
|
||||
import { callHook } from '../bump/callHook';
|
||||
import { getPackageGraph } from '../monorepo/getPackageGraph';
|
||||
import { PackageInfo } from '../types/PackageInfo';
|
||||
|
||||
/**
|
||||
* Publish all the bumped packages to the registry.
|
||||
|
@ -42,21 +44,44 @@ export async function publishToRegistry(originalBumpInfo: PublishBumpInfo, optio
|
|||
performPublishOverrides(packagesToPublish, bumpInfo.packageInfos);
|
||||
|
||||
// if there is a prepublish hook perform a prepublish pass, calling the routine on each package
|
||||
await callHook(options.hooks?.prepublish, packagesToPublish, bumpInfo.packageInfos);
|
||||
await callHook(options.hooks?.prepublish, packagesToPublish, bumpInfo.packageInfos, options.concurrency);
|
||||
|
||||
// finally pass through doing the actual npm publish command
|
||||
const succeededPackages = new Set<string>();
|
||||
|
||||
for (const pkg of packagesToPublish) {
|
||||
const result = await packagePublish(bumpInfo.packageInfos[pkg], options);
|
||||
const packagePublishInternal = async (packageInfo: PackageInfo) => {
|
||||
const result = await packagePublish(packageInfo, options);
|
||||
if (result.success) {
|
||||
succeededPackages.add(pkg);
|
||||
succeededPackages.add(packageInfo.name);
|
||||
} else {
|
||||
displayManualRecovery(bumpInfo, succeededPackages);
|
||||
throw new Error('Error publishing! Refer to the previous logs for recovery instructions.');
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
if (options.concurrency === 1) {
|
||||
for (const pkg of packagesToPublish) {
|
||||
await packagePublishInternal(bumpInfo.packageInfos[pkg]);
|
||||
}
|
||||
} else {
|
||||
const packageGraph = getPackageGraph(packagesToPublish, bumpInfo.packageInfos, packagePublishInternal);
|
||||
await packageGraph.run({
|
||||
concurrency: options.concurrency,
|
||||
// This option is set to true to ensure that all tasks that are started are awaited,
|
||||
// this doesn't actually start tasks for packages of which dependencies have failed.
|
||||
continue: true
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
// p-graph will throw an array of errors if it fails to run all tasks
|
||||
if (Array.isArray(error)) {
|
||||
const errorSet = new Set(error);
|
||||
error = new Error(Array.from(errorSet).join('\n'));
|
||||
}
|
||||
displayManualRecovery(bumpInfo, succeededPackages);
|
||||
throw error;
|
||||
}
|
||||
|
||||
// if there is a postpublish hook perform a postpublish pass, calling the routine on each package
|
||||
await callHook(options.hooks?.postpublish, packagesToPublish, bumpInfo.packageInfos);
|
||||
await callHook(options.hooks?.postpublish, packagesToPublish, bumpInfo.packageInfos, options.concurrency);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import toposort from 'toposort';
|
||||
import { PackageInfos } from '../types/PackageInfo';
|
||||
import { getPackageDependencyGraph } from '../monorepo/getPackageDependencyGraph';
|
||||
|
||||
/**
|
||||
* Topologically sort the packages based on their dependency graph.
|
||||
|
@ -8,31 +9,8 @@ import { PackageInfos } from '../types/PackageInfo';
|
|||
* @param packageInfos PackagesInfos for the sorted packages.
|
||||
*/
|
||||
export function toposortPackages(packages: string[], packageInfos: PackageInfos): string[] {
|
||||
const packageSet = new Set(packages);
|
||||
const dependencyGraph: [string | undefined, string][] = [];
|
||||
|
||||
for (const pkgName of packageSet) {
|
||||
const info = packageInfos[pkgName];
|
||||
if (!info) {
|
||||
throw new Error(`Package info is missing for ${pkgName}.`);
|
||||
}
|
||||
|
||||
const allDeps = new Set(
|
||||
[info.dependencies, info.devDependencies, info.peerDependencies, info.optionalDependencies]
|
||||
.flatMap(deps => Object.keys(deps || {}))
|
||||
.filter(pkg => packageSet.has(pkg))
|
||||
);
|
||||
if (allDeps.size) {
|
||||
for (const depPkgName of allDeps) {
|
||||
dependencyGraph.push([depPkgName, pkgName]);
|
||||
}
|
||||
} else {
|
||||
dependencyGraph.push([undefined, pkgName]);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return toposort(dependencyGraph).filter((pkg): pkg is string => !!pkg);
|
||||
return toposort(getPackageDependencyGraph(packages, packageInfos)).filter((pkg): pkg is string => !!pkg);
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to topologically sort packages: ${(err as Error)?.message}`);
|
||||
}
|
||||
|
|
|
@ -34,6 +34,7 @@ export interface CliOptions
|
|||
canaryName?: string | undefined;
|
||||
command: string;
|
||||
commit?: boolean;
|
||||
concurrency: number;
|
||||
configPath?: string;
|
||||
dependentChangeType?: ChangeType;
|
||||
disallowDeletedChangeFiles?: boolean;
|
||||
|
|
13
yarn.lock
13
yarn.lock
|
@ -8746,6 +8746,11 @@ p-finally@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
|
||||
integrity sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==
|
||||
|
||||
p-graph@^1.1.2:
|
||||
version "1.1.2"
|
||||
resolved "https://registry.yarnpkg.com/p-graph/-/p-graph-1.1.2.tgz#594010591e258ebc013f275f414ef6c5bfc25d51"
|
||||
integrity sha512-GnEEHrOMozk0hCjXBm011oYb3zpaOolxHgqL2s7Od2niGAJKyk/4FZ2VRUAgjqqqoQnZQtwkF6fjGDJkIQTjDQ==
|
||||
|
||||
p-limit@^2.0.0, p-limit@^2.2.0, p-limit@^2.2.1:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
|
||||
|
@ -11894,10 +11899,10 @@ worker-farm@^1.7.0:
|
|||
dependencies:
|
||||
errno "~0.1.7"
|
||||
|
||||
workspace-tools@^0.36.3:
|
||||
version "0.36.4"
|
||||
resolved "https://registry.yarnpkg.com/workspace-tools/-/workspace-tools-0.36.4.tgz#57504c687569785148c5b7ef1470dadc9be970c5"
|
||||
integrity sha512-v0UFVvw9BjHtRu2Dau5PEJKkuG8u4jPlpXZQWjSz9XgbSutpPURqtO2P0hp3cVmQVATh8lkMFCewFgJuDnyC/w==
|
||||
workspace-tools@^0.38.0:
|
||||
version "0.38.0"
|
||||
resolved "https://registry.yarnpkg.com/workspace-tools/-/workspace-tools-0.38.0.tgz#5d7677f9c9f0a7df592537b17b378d1c33eddb86"
|
||||
integrity sha512-BpvydL36Q+AVBU6Rj/a7nfxfEhxvX4ZkLVCsUx5LJ5UpzIcvLDgxvnolBSY+2MUU8VYhvf+PGtF7eWS8xBC1Iw==
|
||||
dependencies:
|
||||
"@yarnpkg/lockfile" "^1.1.0"
|
||||
fast-glob "^3.3.1"
|
||||
|
|
Загрузка…
Ссылка в новой задаче