Remove unnecessary build stuff (#94)

* fix packages for react-native to unblock builds

* Change files

* fix check for change and RN peer version

* update CI tasks to use yarn

* workaround CI running out of file system watchers

* try non-admin command for increasing watch limit

* clean up just tasks and remove unnecessary webpack configs

* Change files

* set CI environment variable to suppress metro file watching

* try manual set of ENV variable

* trying again with local env set

* try using macos for CI

* update environment setting to be done in the right way

* remove some unneeded build files

* remove more unneeded build files

* remove unused build files

* remove attempt to set CI in environment
This commit is contained in:
Jason Morse 2020-02-06 16:18:07 -08:00 committed by GitHub
Parent d815c6631c
Commit 62436203d9
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
32 changed files: 92 additions and 1050 deletions
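The bullets above revolve around one CI problem: the build agents ran out of file-system watchers once Metro began watching the repo, and the workarounds tried include raising the watcher limit, switching to macOS agents, and setting the CI environment variable that (per the commit message) suppresses Metro's file watching. As a rough, hedged sketch only — none of this code is in the commit, and the isCI helper plus the bundle:* task names are invented — a just-scripts preset like the one in this repo could gate watch-mode work behind such a check:

// Hypothetical sketch: pick a non-watching bundle task on CI agents.
// task/series/condition are the just-scripts helpers this repo's preset already uses;
// everything else here is made up for illustration.
const { task, series, condition } = require('just-scripts');

// CI is set by many CI systems; TF_BUILD is set by Azure Pipelines agents.
const isCI = () => !!process.env.CI || !!process.env.TF_BUILD;

module.exports = function ciAwarePreset() {
  task('bundle:watch', () => {
    /* start the bundler in watch mode for local development */
  });
  task('bundle:once', () => {
    /* single bundling pass, no file watchers */
  });

  // Developer machines get the watcher; CI agents get the one-shot task.
  task('bundle', series(
    condition('bundle:watch', () => !isCI()),
    condition('bundle:once', () => isCI())
  ));
};

In the commit itself the approach is ultimately simpler: the pipeline step in the first diff below just runs yarn bundle, with the earlier `export CI=true & ...` attempt removed.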

View file

@@ -33,5 +33,5 @@ steps:
displayName: 'Verify API and Ensure Changed Files'
- script: |
export CI=true & yarn bundle
yarn bundle
displayName: 'yarn bundle'

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/foundation-composable",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:06.840Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/foundation-compose",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:08.937Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/foundation-settings",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:11.247Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/foundation-tokens",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:12.473Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/immutable-merge",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:13.558Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/theme-registry",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:14.637Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/themed-settings",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:15.731Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "fix packages for react-native to unblock builds",
"packageName": "@uifabricshared/themed-stylesheet",
"email": "jasonmo@microsoft.com",
"commit": "30e802cb10f5e91cf3d8ed7d9d9e123c4b7c4fcb",
"date": "2020-02-04T23:30:10.428Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/theming-ramp",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:16.801Z"
}

View file

@@ -0,0 +1,8 @@
{
"type": "patch",
"comment": "clean up just tasks and remove unnecessary webpack configs",
"packageName": "@uifabricshared/theming-react-native",
"email": "jasonmo@microsoft.com",
"commit": "325ea8c30a8c8fbe37f84cd5b5080fc7e6c2864b",
"date": "2020-02-05T23:35:17.840Z"
}

View file

@@ -13,25 +13,14 @@
},
"scripts": {
"preinstall": "node ./scripts/use-yarn-please.js",
"postinstall": "node ./scripts/postinstall.js",
"clean-all": "node ./scripts/clean-all.js",
"start": "cd apps && cd demo && npm start",
"build": "lerna run build --stream -- --production --lint --color",
"buildci": "lerna run build --stream -- --lint --color",
"builddemo": "node ./scripts/monorepo/buildTo.js fabric-website-resources --min",
"buildfast": "lerna run build -- --min --color",
"buildto": "node ./scripts/monorepo/buildTo.js",
"bundle": "lerna run bundle --stream -- --production --lint --color",
"bundlesize": "cd scripts && npm run bundlesize",
"bundlesizecollect": "cd scripts && yarn just-scripts bundle-size-collect",
"rebuild": "node ./scripts/invalidate-just-cache.js && yarn build",
"change": "beachball change",
"check-for-changed-files": "cd scripts && yarn just-scripts check-for-modified-files",
"checkchange": "beachball check --changehint \"Run 'yarn change' to generate a change file\"",
"code-style": "lerna run code-style --stream",
"codepen": "cd packages/office-ui-fabric-react && node ../../scripts/local-codepen.js",
"generate-version-files": "cd scripts && yarnjust-scripts generate-version-files",
"prettier": "node scripts/prettier.js",
"prettier": "cd scripts && yarn prettier",
"publish:beachball": "beachball publish --bump-deps -m\"📦 applying package updates ***NO_CI***\"",
"bump-versions": "beachball bump",
"lint": "lerna run lint",
@@ -58,4 +47,4 @@
"major"
]
}
}
}

View file

@@ -1,16 +0,0 @@
const path = require('path');
const findConfig = require('./find-config');
const findGitRoot = require('./monorepo/findGitRoot');
const configPath = findConfig('jest.config.js');
const rootPath = findGitRoot();
if (!configPath || !rootPath) {
console.log(
'Unable to find jest.config.js relative to currently opened file. Run debug-test from an open source file in a jest enabled project.'
);
} else {
const jestCli = require.resolve('jest-cli/bin/jest.js');
process.chdir(path.dirname(configPath));
require(jestCli);
}

View file

@@ -1,29 +0,0 @@
// @ts-check
const path = require('path');
const child_process = require('child_process');
const chalk = require('chalk').default;
const { logStatus } = require('./logging');
const SEPARATOR = process.platform === 'win32' ? ';' : ':';
const env = Object.assign({}, process.env);
env.PATH = path.resolve('./node_modules/.bin') + SEPARATOR + env.PATH;
/**
* Execute a command synchronously.
*
* @param {string} cmd Command to execute
* @param {string} [displayName] Display name for the command
* @param {string} [cwd] Working directory in which to run the command
*/
function execSync(cmd, displayName, cwd = process.cwd()) {
logStatus(chalk.gray('Executing: ') + chalk.cyan(displayName || cmd));
child_process.execSync(cmd, {
cwd,
env: env,
stdio: 'inherit'
});
}
module.exports = execSync;

View file

@@ -1,52 +0,0 @@
// @ts-check
const path = require('path');
const child_process = require('child_process');
const chalk = require('chalk').default;
const { logStatus } = require('./logging');
const SEPARATOR = process.platform === 'win32' ? ';' : ':',
env = Object.assign({}, process.env);
env.PATH = path.resolve('./node_modules/.bin') + SEPARATOR + env.PATH;
/**
* Execute a command.
*
* @param {string} cmd Command to execute
* @param {string} [displayName] Display name for the command
* @param {string} [cwd] Working directory in which to run the command
* @param {{ stdout?: any; stderr?: any; }} [opts] Pipe stdout/stderr somewhere. Can pass `process` global.
*/
function exec(cmd, displayName, cwd = process.cwd(), opts = {}) {
logStatus(chalk.gray('Executing: ') + chalk.cyan(displayName || cmd));
const execOptions = {
cwd,
env: env,
encoding: 'utf8'
};
return new Promise((resolve, reject) => {
const child = child_process.exec(cmd, execOptions, (error, stdout, stderr) =>
error
? reject({
error,
stdout: stdout,
stderr: stderr
})
: resolve({
stdout: stdout,
stderr: stderr
})
);
if (opts.stdout) {
child.stdout.pipe(opts.stdout);
}
if (opts.stderr) {
child.stderr.pipe(opts.stderr);
}
});
}
module.exports = exec;

View file

@@ -1,6 +1,6 @@
// @ts-check
const { task, series, parallel, condition, option, argv, addResolvePath, copyTask } = require('just-scripts');
const { task, series, parallel, condition, option, argv, addResolvePath, prettierCheckTask, prettierTask } = require('just-scripts');
const path = require('path');
const fs = require('fs');
@@ -13,11 +13,7 @@ const { eslint } = require('./tasks/eslint');
const { webpack, webpackDevServer } = require('./tasks/webpack');
const { metroPackTask } = require('./tasks/metro-pack');
const { verifyApiExtractor, updateApiExtractor } = require('./tasks/api-extractor');
const prettier = require('./tasks/prettier');
const bundleSizeCollect = require('./tasks/bundle-size-collect');
const checkForModifiedFiles = require('./tasks/check-for-modified-files');
const generateVersionFiles = require('./tasks/generate-version-files');
const generatePackageManifestTask = require('./tasks/generate-package-manifest');
module.exports = function preset() {
// this adds a resolve path for the build tooling deps like TS from the scripts folder
@@ -36,6 +32,9 @@ module.exports = function preset() {
// use Metro for bundling task instead of the default webpack
option('useMetro');
// for options that have a check/fix switch this puts them into fix mode
option('fix');
task('clean', clean);
task('copy', copy);
task('jest', jest);
@@ -49,11 +48,8 @@
task('webpack-dev-server', webpackDevServer);
task('verify-api-extractor', verifyApiExtractor);
task('update-api-extractor', updateApiExtractor);
task('prettier', prettier);
task('bundle-size-collect', bundleSizeCollect);
task('prettier', () => argv().fix ? prettierTask : prettierCheckTask);
task('check-for-modified-files', checkForModifiedFiles);
task('generate-version-files', generateVersionFiles);
task('generate-package-manifest', generatePackageManifestTask);
task(
'ts',
series(condition('ts:commonjs-only', () => argv().commonjs), condition(parallel('ts:commonjs', 'ts:esm'), () => !argv().commonjs))
@@ -70,5 +66,5 @@
task('build', series('clean', 'copy', parallel(condition('validate', () => !argv().min), 'ts'))).cached();
task('no-op', () => {}).cached();
task('no-op', () => { }).cached();
};
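The option('fix') / argv().fix pair added above is just-scripts' general mechanism for check-versus-fix switches, with the prettier task picking prettierCheckTask or prettierTask accordingly. A minimal standalone sketch of that mechanism — toy lint:* task names, not code from this repo:

// Minimal check/fix switch with just-scripts (illustrative task names only).
const { task, option, argv, condition, series } = require('just-scripts');

module.exports = function toyPreset() {
  // Register --fix as a recognized flag; argv().fix reads it at run time.
  option('fix');

  task('lint:check', () => console.log('report problems only'));
  task('lint:fix', () => console.log('rewrite files in place'));

  // Running the lint task with --fix rewrites files; without the flag it only checks.
  task('lint', series(
    condition('lint:fix', () => !!argv().fix),
    condition('lint:check', () => !argv().fix)
  ));
};

Combined with the package.json script added later in this commit ("prettier": "node ./just-scripts.js prettier"), something like yarn prettier --fix should reach the same switch, assuming the extra flag is forwarded to the task runner.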

View file

@@ -1,46 +0,0 @@
const webpack = require('webpack');
const WebpackDevServer = require('webpack-dev-server');
const path = require('path');
const fs = require('fs');
const commandLineArgs = require('command-line-args');
const optionDefinitions = [
{
name: 'webpackConfig',
alias: 'w',
type: String
}
];
const options = commandLineArgs(optionDefinitions);
let webpackConfigFilePath = 'webpack.codepen.config.js';
if (options && options.webpackConfig) {
webpackConfigFilePath = options.webpackConfig;
}
const configPath = path.resolve(process.cwd(), webpackConfigFilePath);
if (fs.existsSync(configPath)) {
const ngrok = require('ngrok');
const webpackConfig = require(configPath);
const compiler = webpack(webpackConfig);
const devServerOptions = Object.assign({}, webpackConfig.devServer, {
stats: {
colors: true
}
});
const server = new WebpackDevServer(compiler, devServerOptions);
server.listen(8080, '127.0.0.1', async () => {
const url = await ngrok.connect({ port: 8080, host_header: 'localhost:8080' });
console.log(`Starting server on http://${url}`);
console.log(
`Add this to CodePen:
<script type="text/javascript" src="https://unpkg.com/react@16/umd/react.development.js"></script>
<script type="text/javascript" src="https://unpkg.com/react-dom@16/umd/react-dom.development.js"></script>
<script type="text/javascript" src="${url}/office-ui-fabric-react.js"></script>
`
);
});
}

View file

@@ -1,63 +0,0 @@
const chalk = require('chalk');
const isProduction = process.argv.indexOf('--production') > -1;
const isVerbose = process.argv.indexOf('--verbose') > -1;
module.exports.logStartTask = (packageName, task) => {
console.log(`${getTimePrefix(packageName)} Starting: ${chalk.cyan(task)}`);
};
module.exports.logEndTask = (packageName, task, startTime, errorMessage) => {
console.log(
`${getTimePrefix(packageName)} ${getPassFail(errorMessage === undefined)}: ${chalk.cyan(task)} (${getDuration(startTime)})${
errorMessage ? chalk.white(': ') + chalk.red(errorMessage) : ''
}`
);
};
module.exports.logStatus = taskStatus => {
if (isProduction || isVerbose) {
console.log(' ' + taskStatus);
}
};
module.exports.logEndBuild = (packageName, passed, startTime) => {
console.log();
console.log(
`${chalk.grey('============') +
chalk.white('[ ') +
chalk.cyan(packageName) +
chalk.white(' ]') +
chalk.grey('=') +
chalk.white('[ ') +
getPassFail(passed) +
chalk.white(' ]') +
chalk.grey('=') +
chalk.white('[ ') +
getDuration(startTime) +
chalk.white(' ]') +
chalk.grey('============')}
`
);
};
function getDuration(startTime) {
let duration = new Date().getTime() - startTime;
return chalk.yellow(formatTime(duration));
}
function getPassFail(passed) {
return passed ? chalk.green('Pass') : chalk.red('Error');
}
function getTimePrefix(packageName) {
return `[${chalk.magenta(packageName)} ${chalk.gray(new Date().toLocaleTimeString({ hour12: false }))}]`;
}
function formatTime(milliseconds) {
if (milliseconds >= 1000) {
return milliseconds / 1000 + 's';
} else {
return milliseconds + 'ms';
}
}

View file

View file

@@ -1,5 +1,5 @@
const { spawnSync } = require('child_process');
const { readConfig } = require('../read-config');
const { readConfig } = require('./read-config');
const path = require('path');
const findGitRoot = require('./findGitRoot');

View file

View file

@@ -12,6 +1,7 @@
"clean": "",
"code-style": "node ./just-scripts.js code-style",
"test": "jest",
"prettier": "node ./just-scripts.js prettier",
"verify-api": "echo no api to verify",
"update-api": "echo no api to update"
},
@@ -112,4 +113,4 @@
"major"
]
}
}
}

View file

@@ -1,13 +0,0 @@
const { spawnSync } = require('child_process');
const chalk = require('chalk');
// git v2.9.0 supports a custom hooks directory. This means we just need to checkin the hooks scripts
spawnSync('git', ['config', 'core.hooksPath', '.githooks']);
console.log(`${chalk.green('All dependencies are installed! This repo no longer automatically runs builds when installing dependencies.')}
For innerloop development, run these commands:
${chalk.yellow('yarn builddemo')}
${chalk.yellow('yarn start')}
`);

View file

@@ -1,60 +0,0 @@
// @ts-check
const { execSync } = require('child_process');
const path = require('path');
const { EOL, cpus } = require('os');
const { runPrettierMultiProject, runPrettierForProject, prettierExtensions } = require('./prettier/prettier-helpers');
const { default: PQueue } = require('p-queue');
const getAllPackageInfo = require('./monorepo/getAllPackageInfo');
const runOnAllFiles = require('yargs').argv.all;
/**
* Run prettier for some files.
* @param {string[]} filePaths Run for these file paths
*/
function runPrettierForFiles(filePaths) {
if (filePaths.length === 0) {
return Promise.resolve();
}
console.log(`Running for ${filePaths.length} files!`);
return runPrettierMultiProject(filePaths, true /*async*/);
}
const numberOfCpus = cpus().length / 2;
console.log(`Running prettier on ${runOnAllFiles ? 'all' : 'changed'} files (on ${numberOfCpus} processes):`);
const queue = new PQueue({ concurrency: numberOfCpus });
if (runOnAllFiles) {
const allPackages = getAllPackageInfo();
queue.addAll(Object.keys(allPackages).map(name => () => runPrettierForProject(allPackages[name].packagePath)));
} else {
const prettierIntroductionCommit = 'HEAD~1';
const passedDiffTarget = process.argv.slice(2).length ? process.argv.slice(2)[0] : prettierIntroductionCommit;
const projectPath = path.resolve(__dirname, '..');
const cmd = `git --no-pager diff ${passedDiffTarget} --diff-filter=AM --name-only --stat-name-width=0`;
const gitDiffOutput = execSync(cmd, { cwd: projectPath });
const prettierExtRegex = new RegExp(`\\.(${prettierExtensions.join('|')})$`);
const files = gitDiffOutput
.toString('utf8')
.split(EOL)
.filter(fileName => prettierExtRegex.test(fileName));
const fileGroups = [];
for (let chunkStart = 0; chunkStart < files.length; chunkStart += numberOfCpus) {
fileGroups.push(files.slice(chunkStart, chunkStart + numberOfCpus));
}
queue.addAll(fileGroups.map(group => () => runPrettierForFiles(group)));
}
queue
.onEmpty()
.then(() => {
console.log('🙌 All done! 🙌');
})
.catch(error => {
console.error(error);
process.exit(1);
});

View file

@@ -1,136 +0,0 @@
// @ts-check
const path = require('path');
const fs = require('fs');
const execSync = require('../exec-sync');
const exec = require('../exec');
const prettierConfig = 'prettier.config.js';
const prettierIgnore = '.prettierignore';
const repoRoot = path.resolve(__dirname, '..', '..');
const prettierRulesConfig = path.join(repoRoot, 'packages', 'prettier-rules', prettierConfig);
const prettierIgnorePath = path.join(repoRoot, prettierIgnore);
const prettierBin = require.resolve('prettier/bin-prettier.js');
const getAllPackageInfo = require('../monorepo/getAllPackageInfo');
/** Array of absolute project paths with prettier configs */
let projectsWithPrettierConfig;
const prettierExtensions = ['ts', 'tsx', 'js', 'jsx', 'json', 'scss', 'html', 'md'];
function init() {
if (projectsWithPrettierConfig) {
return;
}
projectsWithPrettierConfig = [];
const projects = getAllPackageInfo();
if (projects) {
// Check the root of each project for a custom prettier config, and save the project paths that have one
for (const project of Object.keys(projects)) {
const info = projects[project];
const packagePath = path.resolve(repoRoot, info.packagePath);
if (fs.existsSync(path.join(packagePath, prettierConfig))) {
projectsWithPrettierConfig.push(packagePath);
}
}
}
}
/**
* Run prettier for a given set of files with the given config.
*
* @param {string[]} files List of files for which to run prettier
* @param {string} configPath Path to relevant prettier.config.js.
* @param {boolean} [runAsync] Whether to run the command synchronously or asynchronously
* @param {boolean} [logErrorsOnly] If true, log errors/warnings only. Otherwise log all output.
* @returns A promise if run asynchronously, or nothing if run synchronously
*/
function runPrettier(files, configPath, runAsync, logErrorsOnly) {
const cmd = [
'node',
prettierBin,
'--config',
configPath,
'--ignore-path',
`"${prettierIgnorePath}"`,
...(logErrorsOnly ? ['--loglevel', 'warn'] : []),
'--write',
...files
].join(' ');
if (runAsync) {
return exec(cmd, undefined, undefined, process);
} else {
execSync(cmd);
}
}
/**
* Runs prettier on all ts/tsx/json/js files in a project.
*
* @param {string} projectPath Path to the project root for which to run prettier
* @returns {Promise<void>}
*/
function runPrettierForProject(projectPath) {
init();
if (!path.isAbsolute(projectPath)) {
projectPath = path.join(repoRoot, projectPath);
}
const sourcePath = path.join(projectPath, '**', `*.{${prettierExtensions.join(',')}}`);
const configPath = projectsWithPrettierConfig.includes(projectPath) ? path.join(projectPath, prettierConfig) : prettierRulesConfig;
console.log(`Running prettier for ${sourcePath} using config ${configPath}`);
return runPrettier([sourcePath], configPath, true, true);
}
/**
* Runs prettier on the given list of files.
*
* @param {string[]} files Staged files passed in by lint-staged
* @param {boolean} [runAsync] Whether to run the command synchronously or asynchronously
* @returns A promise if run asynchronously, or nothing if run synchronously
*/
function runPrettierMultiProject(files, runAsync) {
if (files.length === 0) {
return runAsync ? Promise.resolve() : undefined;
}
init();
// Build a mapping from config file name to files for which that config applies
const configMap = {};
for (const file of files) {
// Default to the repo-wide config
let configPath = prettierRulesConfig;
const absPath = path.resolve(repoRoot, file);
for (const projectPath of projectsWithPrettierConfig) {
// Check if this file is inside any of the projects with a custom config
if (absPath.startsWith(projectPath)) {
configPath = path.join(projectPath, prettierConfig);
break;
}
}
if (!configMap[configPath]) {
configMap[configPath] = [];
}
configMap[configPath].push(file);
}
const configPaths = Object.keys(configMap);
// Run all the prettier commands in sequence
if (runAsync) {
let promise = Promise.resolve();
for (const configPath of configPaths) {
promise = promise.then(() => runPrettier(configMap[configPath], configPath, true));
}
return promise;
} else {
for (const configPath of configPaths) {
runPrettier(configMap[configPath], configPath);
}
}
}
module.exports = { runPrettierForProject, runPrettierMultiProject, prettierExtensions };

View file

@@ -1,23 +0,0 @@
// @ts-check
const path = require('path');
const chalk = require('chalk').default;
const execSync = require('./exec-sync');
const getAllPackageInfo = require('./monorepo/getAllPackageInfo');
const allPackages = getAllPackageInfo();
const packages = [];
Object.keys(allPackages).forEach(name => {
const info = allPackages[name];
if (info.packageJson.private !== true) {
packages.push(info);
}
});
for (const package of packages) {
const packagePath = path.resolve(__dirname, '..', package.packagePath);
console.log(`Publishing ${chalk.magenta(package.packageName)} in ${packagePath}`);
execSync('npm publish --tag next', undefined, packagePath);
}

View file

@@ -1,40 +0,0 @@
// @ts-check
module.exports = function bundleSizeCollect() {
// This script collates bundle size information from
// minified files in apps/test-bundles/dist and writes to
// apps/test-bundles/dist/bundlesizes.json.
// It is uploaded as an artifact by the build definition in
// Azure Dev Ops and used to compare baseline and PR file size
// information which gets reported by Size Auditor
const fs = require('fs');
const path = require('path');
const distRoot = path.join(__dirname, '../../apps/test-bundles/dist');
const sizes = {};
const outputFilename = 'bundlesizes.json';
var items = fs.readdirSync(distRoot);
items.forEach(item => {
const file = path.join(distRoot, item);
const isMinifiedJavascriptFile = item.match(/.min.js$/);
if (isMinifiedJavascriptFile) {
sizes[getComponentName(item)] = getFilesizeInBytes(file);
}
});
fs.writeFileSync(path.join(distRoot, outputFilename), JSON.stringify({ sizes }));
function getFilesizeInBytes(fileName) {
return fs.statSync(fileName).size;
}
function getComponentName(fileName) {
if (fileName.startsWith('experiments-')) {
return path.basename(fileName, '.min.js');
}
return fileName.match('office-ui-fabric-react-(.*).min.js')[1];
}
};

View file

@@ -1,10 +0,0 @@
const path = require('path');
const fs = require('fs');
const getAllPackageInfo = require('../monorepo/getAllPackageInfo');
const findGitRoot = require('../monorepo/findGitRoot');
module.exports = function generatePackageManifestTask() {
const allPackageInfo = getAllPackageInfo();
const root = findGitRoot();
fs.writeFileSync(path.join(root, 'package-manifest.json'), JSON.stringify(allPackageInfo, null, 2));
};

View file

@@ -1,97 +0,0 @@
// @ts-check
const { spawnSync } = require('child_process');
const path = require('path');
const fs = require('fs');
const glob = require('glob');
const generateOnly = process.argv.indexOf('-g') > -1;
const beachballBin = require.resolve('beachball/bin/beachball.js');
const bumpCmd = [process.execPath, beachballBin];
const findGitRoot = require('../monorepo/findGitRoot');
const gitRoot = findGitRoot();
function run(args) {
const [cmd, ...restArgs] = args;
const runResult = spawnSync(cmd, restArgs, { cwd: gitRoot });
if (runResult.status === 0) {
return runResult.stdout.toString().trim();
}
return null;
}
module.exports = function generateVersionFiles() {
let modified = [];
let untracked = [];
const gitRoot = findGitRoot();
if (!generateOnly) {
// Check that no uncommitted changes exist
let status = run(['git', 'status', '-s']);
if (status) {
console.log('Repository needs to contain no changes for version generation to proceed.');
process.exit();
}
// Do a dry-run on all packages
run(bumpCmd);
status = run(['git', 'status', '--porcelain=1']);
status.split(/\n/g).forEach(line => {
if (line) {
const parts = line.trim().split(/\s/);
if (parts[0] === '??') {
// untracked files at this point would be things like CHANGELOG files for a brand new project
untracked.push(parts[1]);
} else {
// modified files include package.json, generated CHANGELOG files from beachball
modified.push('"' + parts[1] + '"');
}
}
});
}
const packageJsons = glob.sync('+(packages|apps)/*/package.json', { cwd: gitRoot });
packageJsons.forEach(packageJsonPath => {
const versionFile = path.join(gitRoot, path.dirname(packageJsonPath), 'src/version.ts');
const packageJson = JSON.parse(fs.readFileSync(path.join(gitRoot, packageJsonPath), 'utf-8'));
const dependencies = packageJson.dependencies || {};
if (
!fs.existsSync(path.dirname(versionFile)) ||
packageJsonPath.indexOf('set-version') > -1 ||
!dependencies['@uifabric/set-version']
) {
return;
}
let shouldGenerate = true;
if (fs.existsSync(versionFile) && process.argv.indexOf('-f') < 0) {
const originVersionFileContent = fs.readFileSync(versionFile).toString();
shouldGenerate = originVersionFileContent.indexOf(`${packageJson.name}@${packageJson.version}`) < 0;
}
if (shouldGenerate) {
console.log(`generating ${versionFile}`);
fs.writeFileSync(
versionFile,
`// ${packageJson.name}@${packageJson.version}
// Do not modify this file, the file is generated as part of publish. The checked in version is a placeholder only.
import { setVersion } from '@uifabric/set-version';
setVersion('${packageJson.name}', '${packageJson.version}');`
);
}
});
if (!generateOnly) {
// Undo the dry-run changes, preserve the version file changes
console.log(`remove untracked ${untracked.join(' ')}`);
untracked.forEach(f => fs.unlinkSync(f));
console.log(`reset ${modified.join(' ')}`);
run(['git', 'checkout', ...modified]);
}
};

View file

@@ -1,59 +0,0 @@
// @ts-check
/**
* Script to update all versions and dependencies within the repo.
*
* Usage:
*
* node update-package-versions.js "6.0.0-alpha" ">=6.0.0-0 <7.0.0-0"
*/
const path = require('path');
const process = require('process');
const chalk = require('chalk').default;
const getAllPackageInfo = require('./monorepo/getAllPackageInfo');
const writeConfig = require('./write-config');
const allPackages = getAllPackageInfo();
const newVersion = process.argv[2];
const newDep = process.argv[3] || newVersion;
function help() {
console.error('update-package-versions.js - usage:\n node update-package-versions.js "6.0.0-alpha" ">=6.0.0-0 <7.0.0-0"');
}
if (!allPackages) {
help();
console.error('Could not get all the packages');
process.exit(1);
}
if (!newVersion || !newDep) {
help();
console.error('Must specify newVersion and newDep');
process.exit(1);
}
for (const name of Object.keys(allPackages)) {
const info = allPackages[name];
const packageJson = info.packageJson;
console.log(`Updating ${chalk.magenta(name)} from ${chalk.grey(packageJson.version)} to ${chalk.green(newVersion)}.`);
packageJson.version = newVersion;
function updateDependencies(deps) {
for (const dependency in deps) {
if (Object.keys(allPackages).find(name => name === dependency)) {
console.log(` Updating deps ${dependency}`);
deps[dependency] = newDep;
}
}
}
updateDependencies(packageJson.dependencies);
updateDependencies(packageJson.devDependencies);
writeConfig(info.packagePath, packageJson);
}

View file

@@ -1,350 +0,0 @@
// @ts-check
'use strict';
/**
* This script sends release notes to github. The release notes are pulled from
* CHANGELOG.json entries and are only sent if there aren't already notes for a
* given tag.
*
* @typedef {{
* comment: string;
* commit?: string;
* }} ChangelogComment
*
* @typedef {{
* comments: { major: ChangelogComment[]; minor: ChangelogComment[]; patch: ChangelogComment[]; };
* name?: string;
* date?: string;
* tag?: string;
* version: string;
* body?: string;
* }} ChangelogEntry
*
* @typedef {{
* number: number;
* url: string;
* author: string;
* authorUrl: string;
* }} PullRequest
*/
const path = require('path');
const fs = require('fs');
const yargs = require('yargs');
const execSync = require('child_process').execSync;
const GitHubApi = require('@octokit/rest');
const argv = yargs
.option('token', {
describe: 'GitHub personal access token',
type: 'string',
required:
'A GitHub personal access token is required even for dry runs due to the potential high rate of requests.\n' +
'Generate one here: https://github.com/settings/tokens\n'
})
.option('apply', { describe: 'Actually apply changes (without this option, do a dry run)', type: 'boolean', default: false })
.option('patch', { describe: 'Patch existing release notes for releases less than `age` days old', type: 'boolean', default: false })
.option('patch-all', { describe: 'Patch ALL existing release notes (will likely hit rate limits)', type: 'boolean', default: false })
.option('debug', { describe: 'Use debug mode for the GitHub API', type: 'boolean', default: false })
// Default to checking the past 5 days in case there were any missed days or other issues
.option('age', { describe: 'Get tags/releases up to this many days old', type: 'number', default: 5 })
.option('owner', { describe: 'Owner of the repo to work against', type: 'string', default: 'OfficeDev' })
.option('repo', { describe: 'Repo to work against', type: 'string', default: 'office-ui-fabric-react' })
.version(false)
.help().argv;
const EOL = '\n';
const MILLIS_PER_DAY = 1000 * 60 * 60 * 24;
const REPO_DETAILS = {
owner: argv.owner,
repo: argv.repo
};
if (!argv.apply) {
console.log('NOTE: This is a test run only. To actually update release notes on GitHub, use the "--apply" flag.');
}
// Authenticate with github and set up logging if debug arg is provided
const github = new GitHubApi({ ...(argv.debug ? { log: console } : {}), auth: 'token ' + argv.token });
// Call the primary entry point.
updateReleaseNotes();
/**
* For each file within the folder tree that matches the filename, call the callback
* with an object containing path/content.
* @param {string} folder
* @param {string} fileName
* @param {(result: { path: string; content: string; }) => void} cb
*/
function forEachFileRecursive(folder, fileName, cb) {
folder = folder || process.cwd();
let folderContent = fs.readdirSync(folder).filter(name => !['node_modules', '.git'].includes(name));
folderContent
.filter(itemName => itemName === fileName)
.forEach(matchedFileName =>
cb({
path: path.resolve(folder, matchedFileName),
content: fs.readFileSync(path.resolve(folder, matchedFileName), 'utf8')
})
);
folderContent.forEach(itemName => {
let itemPath = path.resolve(folder, itemName);
if (fs.lstatSync(itemPath).isDirectory()) {
forEachFileRecursive(itemPath, fileName, cb);
}
});
}
/**
* Build up the markdown from the entry description.
* @param {ChangelogEntry} entry
*/
async function getMarkdownForEntry(entry) {
const comments =
(await getChangeComments('Breaking changes', entry.comments.major)) +
(await getChangeComments('Minor changes', entry.comments.minor)) +
(await getChangeComments('Patches', entry.comments.patch));
return comments || '*Changes not tracked*' + EOL + EOL;
}
/**
* Gets the release notes markdown corresponding to the comment array.
* @param {string} title Section title (probably change type, major/minor/patch)
* @param {ChangelogComment[]} comments Changelog comments for a version
*/
async function getChangeComments(title, comments) {
if (comments) {
const lines = ['## ' + title, ''];
for (const comment of comments) {
let line = `- ${comment.comment}`;
if (comment.commit) {
line += ` ([commit](https://github.com/${REPO_DETAILS.owner}/${REPO_DETAILS.repo}/commit/${comment.commit})`;
const pr = await getPullRequest(comment.commit);
if (pr) {
line += ` by [${pr.author}](${pr.authorUrl}), PR [#${pr.number}](${pr.url})`;
}
line += `)`;
}
lines.push(line);
}
lines.push('');
return lines.join(EOL);
}
return '';
}
/**
* Get the single pull request associated with the given commit.
* @param {string} commitHash
* @returns {Promise<PullRequest>}
*/
async function getPullRequest(commitHash) {
try {
const result = await github.repos.listPullRequestsAssociatedWithCommit({ commit_sha: commitHash, ...REPO_DETAILS });
// In case the commit has been in multiple PRs at some point but only one got merged
const prs = result.data.filter(result => !!result.merged_at);
if (prs.length > 1) {
// In case the commit was in PRs to multiple branches or something?
console.warn(`Multiple PRs found for ${commitHash}: ${prs.map(pr => '#' + pr.number).join(', ')}`);
}
if (prs[0]) {
return {
number: prs[0].number,
url: prs[0].html_url,
author: prs[0].user.login,
authorUrl: prs[0].user.html_url
};
} else {
console.warn('No PRs found for ' + commitHash);
}
} catch (ex) {
console.warn(`Error finding PR for ${commitHash}: ${ex}`);
}
return null;
}
/**
* Builds a map of changelog tags to entries defined in CHANGELOG.json files.
* @param {number} [maxAgeDays] If provided, only include entries less than this many days old.
* Otherwise get all entries.
*/
function getChangelogTagMap(maxAgeDays) {
/** @type {Map<string, ChangelogEntry>} */
let map = new Map();
forEachFileRecursive(undefined, 'CHANGELOG.json', result => {
/** @type {{ entries: ChangelogEntry[]; name: string; }} */
let changelog = JSON.parse(result.content);
for (const entry of changelog.entries) {
if (isNewEnough(entry.date, maxAgeDays)) {
entry.name = changelog.name;
map.set(entry.tag, entry);
} else {
// changelog entries should be in reverse chronological order, so stop after the first one
// that's too old
break;
}
}
});
return map;
}
/**
* Gets all the tags in a repo using 'git tag'.
* @param {number} [maxAgeDays] If provided, only include entries less than this many days old.
* Otherwise get all entries.
*/
function getTags(maxAgeDays) {
const cmd = [
'git',
'for-each-ref',
'--sort=-committerdate', // commit date descending
"--format='%(refname:short) -- %(committerdate)'",
'refs/tags'
].join(' ');
let tagsAndDates = execSync(cmd, { cwd: process.cwd() })
.toString()
.split('\n')
.map(tag => tag.split(' -- '))
.filter(arr => arr.length === 2);
if (maxAgeDays) {
const endIndex = tagsAndDates.findIndex(([, date]) => !isNewEnough(date, maxAgeDays));
if (endIndex !== -1) {
tagsAndDates = tagsAndDates.slice(0, endIndex);
}
}
const tags = tagsAndDates.map(([tag]) => tag);
console.log(`Found ${tags.length} tag(s).`);
return tags;
}
/**
* @param {string} dateStr String of a date
* @param {number} [maxAgeDays] If provided, only return true if entry is less than this many days old.
* If not provided, always return true.
*/
function isNewEnough(dateStr, maxAgeDays) {
return !maxAgeDays || Date.now() - new Date(dateStr).getTime() < maxAgeDays * MILLIS_PER_DAY;
}
/**
* Gets all releases from github.
* @param {string[]} [tags] If provided, only get the releases for these tags (it's okay if a tag
* doesn't have a release yet). Otherwise get all the releases.
*/
async function getReleases(tags) {
/** @type {Map<string, GitHubApi.ReposListReleasesResponseItem>} */
let releases = new Map();
if (tags) {
// Only get a subset of releases
for (const tag of tags) {
try {
const release = await github.repos.getReleaseByTag({ ...REPO_DETAILS, tag });
releases.set(release.data.tag_name, release.data);
} catch (err) {
if (err.status === 404) {
// This tag probably isn't released yet, which is fine in this context
} else {
throw new Error(`Could not get release for tag ${tag} from github.\n${err}`);
}
}
}
} else {
// Get all the releases
try {
/** @type {GitHubApi.ReposListReleasesResponseItem[]} */
const res = await github.paginate(github.repos.listReleases.endpoint.merge(REPO_DETAILS));
res.forEach(release => {
releases.set(release.tag_name, release);
});
} catch (err) {
throw new Error('Could not get releases from github.\n' + err);
}
}
console.log(`Found ${releases.size}${tags ? ' recent' : ''} releases on github.`);
return releases;
}
/**
* Adds new release notes, and if argv.patch is true, will patch existing ones in the case
* that they need to be regenerated.
*/
async function updateReleaseNotes() {
// If we're patching all release notes, get all the changelog entries/tags and all the releases
// (expensive operations). Otherwise only get changelog entries/tags from the past argv.age days,
// and corresponding releases (if they exist yet).
const changelogEntries = getChangelogTagMap(argv.patchAll ? undefined : argv.age);
const tagsToFetch = argv.patchAll ? undefined : Array.from(changelogEntries.keys());
const releasesByTag = await getReleases(tagsToFetch);
let count = 0;
const tags = getTags().filter(tag => changelogEntries.has(tag));
// do NOT use forEach here, since it doesn't handle async properly
for (const tag of tags) {
let entry = changelogEntries.get(tag);
let hasBeenReleased = releasesByTag.has(tag);
if (hasBeenReleased && !(argv.patch || argv.patchAll)) {
continue; // nothing to do
}
const entryInfo = `${entry.name} ${entry.version}`;
console.log(`${hasBeenReleased ? 'Patching' : 'Creating'} release notes for ${entryInfo}`);
count++;
/** @type {Partial<GitHubApi.ReposUpdateReleaseParams>} */
const releaseDetails = {
...REPO_DETAILS,
tag_name: entry.tag,
name: `${entry.name} v${entry.version}`,
draft: false,
prerelease: false,
body: await getMarkdownForEntry(entry)
};
if (hasBeenReleased) {
releaseDetails.release_id = releasesByTag.get(tag).id;
}
if (argv.apply) {
try {
if (hasBeenReleased) {
await github.repos.updateRelease(/** @type {GitHubApi.ReposUpdateReleaseParams} */ (releaseDetails));
} else {
await github.repos.createRelease(/** @type {GitHubApi.ReposCreateReleaseParams} */ (releaseDetails));
}
console.log(`Successfully ${hasBeenReleased ? 'updated' : 'created'} release notes for ${entryInfo}`);
} catch (err) {
throw new Error(`Failed to commit release notes for ${entryInfo}.${EOL}${err}`);
}
} else {
// Log the expected output (with the body separate to get it nicely formatted, not as JSON)
const { body, ...rest } = releaseDetails;
console.log('\nRelease details: ' + JSON.stringify(rest, null, 2));
console.log('\n' + body);
}
}
if (!count) {
console.log('No changes were applied.');
}
}

View file

@@ -1,30 +0,0 @@
const fs = require('fs');
const findConfig = require('./find-config');
const jju = require('jju');
/**
* Make the requested updates to the given config file.
*
* @param {string} file Full path to or name of the config file. If no file exists at the location
* as given, `file` is assumed to be a config file name and the method will run
* `findConfig(file)` to find the full path.
* @param {any} newContents Object representing the new contents of the file. Any comments from the
* original file should be preserved.
* @returns True if operation succeeded
*/
function writeConfig(file, newValue) {
file = findConfig(file);
if (!file) {
return false;
}
const oldContents = fs.readFileSync(file, 'utf8');
const newContents = jju.update(oldContents, newValue, {
mode: 'cjson',
indent: 2
});
fs.writeFileSync(file, newContents);
return true;
}
module.exports = writeConfig;