// NOTE: a code-viewer metadata banner (line count / size / language) was
// accidentally captured at the top of this file and has been removed.
|
|
var admZip = require('adm-zip');
|
|
var check = require('validator');
|
|
var fs = require('fs');
|
|
var minimatch = require('minimatch');
|
|
var os = require('os');
|
|
var path = require('path');
|
|
var process = require('process');
|
|
var ncp = require('child_process');
|
|
var semver = require('semver');
|
|
var shell = require('shelljs');
|
|
const Downloader = require("nodejs-file-downloader");
|
|
|
|
// global paths

// cache directory for downloaded files and archives
// (used by downloadFileAsync / downloadArchiveAsync below)
var downloadPath = path.join(__dirname, '_download');

// shared make options, e.g. the default set of task resources to copy
var makeOptions = require('./make-options.json');

// list of .NET culture names
var cultureNames = ['cs', 'de', 'es', 'fr', 'it', 'ja', 'ko', 'pl', 'pt-BR', 'ru', 'tr', 'zh-Hans', 'zh-Hant'];

// core dev-dependencies constants
const constants = require('./dev-dependencies-constants');

const MOCHA_TARGET_VERSION = constants.MOCHA_TARGET_VERSION;
const TSC_CURRENT_VERSION = constants.TSC_CURRENT_VERSION;
const MOCHA_NODE_VERSION = constants.MOCHA_NODE_VERSION;

// the only TypeScript versions a task may pin via its own devDependencies
// (enforced in buildNodeTask)
const allowedTypescriptVersions = [TSC_CURRENT_VERSION];
|
|
|
|
//------------------------------------------------------------------------------
// shell functions
//------------------------------------------------------------------------------

// Throw if the most recent shelljs operation recorded an error.
var shellAssert = function () {
    var shellError = shell.error();
    if (!shellError) {
        return;
    }

    throw new Error(shellError);
}
|
|
|
|
// Change the current working directory, throwing on failure.
var cd = function (dir) {
    shell.cd(dir);
    shellAssert();
}
exports.cd = cd;
|
|
|
|
// Copy files via shelljs, throwing on failure. Supports both the
// three-arg form cp(options, source, dest) and the two-arg form
// cp(source, dest), in which case "dest" here is undefined.
var cp = function (options, source, dest) {
    dest ? shell.cp(options, source, dest) : shell.cp(options, source);
    shellAssert();
}
exports.cp = cp;
|
|
|
|
// Create directories via shelljs, throwing on failure. Supports both
// mkdir(options, target) and the single-arg form mkdir(target).
var mkdir = function (options, target) {
    target ? shell.mkdir(options, target) : shell.mkdir(options);
    shellAssert();
}
exports.mkdir = mkdir;
|
|
|
|
// Remove files/directories via shelljs, throwing on failure. Supports
// both rm(options, target) and the single-arg form rm(target).
var rm = function (options, target) {
    target ? shell.rm(options, target) : shell.rm(options);
    shellAssert();
}
exports.rm = rm;
|
|
|
|
// Evaluate a shelljs file test (e.g. '-f', '-d') against path "p",
// throwing if shelljs recorded an error.
var test = function (options, p) {
    var outcome = shell.test(options, p);
    shellAssert();

    return outcome;
}
exports.test = test;
|
|
//------------------------------------------------------------------------------

// Throw when "value" is falsy; "name" identifies the offending parameter.
var assert = function (value, name) {
    if (value) {
        return;
    }

    throw new Error(`"${name}" cannot be null or empty.`);
}
exports.assert = assert;
|
|
|
|
// Print a console banner. Unless noBracket is truthy, the message is
// framed by horizontal rules.
var banner = function (message, noBracket) {
    var rule = '------------------------------------------------------------';

    console.log();
    if (!noBracket) {
        console.log(rule);
    }

    console.log(message);

    if (!noBracket) {
        console.log(rule);
    }

    console.log();
}
exports.banner = banner;
|
|
|
|
// Resolve a path relative to the current working directory ("relative path").
var rp = function (relPath) {
    var cwd = pwd() + ''; // coerce the shelljs result to a string
    return path.join(cwd, relPath);
}
exports.rp = rp;
|
|
|
|
// Print an error message and terminate the process with exit code 1.
var fail = function (message) {
    console.error(`ERROR: ${message}`);
    process.exit(1);
}
exports.fail = fail;
|
|
|
|
// Fail the build unless "checkPath" exists as a directory or a file.
var ensureExists = function (checkPath) {
    assert(checkPath, 'checkPath');

    if (!test('-d', checkPath) && !test('-f', checkPath)) {
        fail(checkPath + ' does not exist');
    }
}
exports.ensureExists = ensureExists;
|
|
|
|
// Return true when "checkPath" exists as a directory or a file.
var pathExists = function (checkPath) {
    if (test('-d', checkPath)) {
        return true;
    }

    return test('-f', checkPath);
}
exports.pathExists = pathExists;
|
|
|
|
// Run tslint for a task folder. Lint only runs when the task carries both a
// tslint.json and a tsconfig.json; the working directory is restored afterwards.
var lintNodeTask = function (taskPath) {
    var startDir = pwd();
    cd(taskPath);

    //To lint, the task must have a tslint.json and tsconfig.json file
    if (test('-f', rp('tslint.json')) && test('-f', rp('tsconfig.json'))) {
        run('node ../../node_modules/tslint/bin/tslint -c tslint.json -t prose --project tsconfig.json');
    }

    cd(startDir);
}
exports.lintNodeTask = lintNodeTask;
|
|
|
|
// Build a Node-based task: npm install + tsc compile into outDir.
// A task may pin its own TypeScript version by declaring typescript as its
// ONLY dev dependency (version must be in allowedTypescriptVersions); that
// local compiler is then used and stripped from node_modules afterwards.
var buildNodeTask = function (taskPath, outDir) {
    var originalDir = pwd();
    cd(taskPath);
    var packageJsonPath = rp('package.json');
    var overrideTscPath;
    if (test('-f', packageJsonPath)) {
        // verify no dev dependencies
        // we allow a TS dev-dependency to indicate a task should use a different TS version
        var packageJson = JSON.parse(fs.readFileSync(packageJsonPath).toString());
        var devDeps = packageJson.devDependencies ? Object.keys(packageJson.devDependencies).length : 0;
        if (devDeps == 1 && packageJson.devDependencies["typescript"]) {
            var version = packageJson.devDependencies["typescript"];
            if (!allowedTypescriptVersions.includes(version)) {
                fail(`The package.json specifies a different TS version (${version}) that the allowed versions: ${allowedTypescriptVersions}. Offending package.json: ${packageJsonPath}`);
            }
            overrideTscPath = path.join(taskPath, "node_modules", "typescript");
            console.log(`Detected Typescript version: ${version}`);
        } else if (devDeps >= 1) {
            fail('The package.json should not contain dev dependencies other than typescript. Move the dev dependencies into a package.json file under the Tests sub-folder. Offending package.json: ' + packageJsonPath);
        }

        run('npm install');
    }

    // Use the tsc version supplied by the task if it is available, otherwise use the global default.
    if (overrideTscPath) {
        var tscExec = path.join(overrideTscPath, "bin", "tsc");
        run("node " + tscExec + ' --outDir "' + outDir + '" --rootDir "' + taskPath + '"');
        // Don't include typescript in node_modules

        // remove tsc and tsserver symbolic links.
        // BUGFIX: os.platform is a function; the original compared the function
        // object itself to 'win32' (always true), so this also ran on Windows.
        if (os.platform() !== 'win32') {
            rm('-f', path.join(taskPath, 'node_modules', '.bin', 'tsc'));
            rm('-f', path.join(taskPath, 'node_modules', '.bin', 'tsserver'));
        }

        //remove typescript from node_modules
        rm("-rf", overrideTscPath);
    } else {
        run('tsc --outDir "' + outDir + '" --rootDir "' + taskPath + '"');
    }

    cd(originalDir);
}
exports.buildNodeTask = buildNodeTask;
|
|
|
|
// Build a PowerShell3 task: fetch the VstsTaskSdk module from the gallery and
// lay its script/manifest/resource files down under ps_modules/VstsTaskSdk.
var buildPs3Task = async function (taskPath, outDir) {
    var sdkUrl = 'https://www.powershellgallery.com/api/v2/package/VstsTaskSdk/0.7.1';
    var sdkSource = await downloadArchiveAsync(sdkUrl, /*omitExtensionCheck*/true);
    var sdkDest = path.join(outDir, 'ps_modules/VstsTaskSdk');
    matchCopy('+(*.ps1|*.psd1|*.psm1|lib.json|Strings)', sdkSource, sdkDest, { noRecurse: true });
}
exports.buildPs3Task = buildPs3Task;
|
|
|
|
// Copy the globally defined set of default task resources (from
// make-options.json "taskResources") from srcPath into destPath.
var copyTaskResources = function (srcPath, destPath) {
    assert(srcPath, 'srcPath');
    assert(destPath, 'destPath');

    makeOptions['taskResources'].forEach(function (pattern) {
        matchCopy(pattern, srcPath, destPath, { noRecurse: true });
    });
}
exports.copyTaskResources = copyTaskResources;
|
|
|
|
// Find items under "root" whose paths match the minimatch "pattern".
// options:
//   noRecurse - when truthy, only immediate children of root are considered
//   (all other keys are forwarded to minimatch; matchBase defaults to true)
var matchFind = function (pattern, root, options) {
    assert(pattern, 'pattern');
    assert(root, 'root');

    // determine whether to recurse
    options = options || {};
    var noRecurse = options.hasOwnProperty('noRecurse') && options.noRecurse;

    // merge specified options with defaults.
    // BUGFIX: mergedOptions was previously assigned without a declaration,
    // leaking an implicit global; also avoid mutating the caller's options
    // object (the original deleted noRecurse from it).
    var mergedOptions = { matchBase: true };
    Object.keys(options).forEach(function (key) {
        if (key != 'noRecurse') {
            mergedOptions[key] = options[key];
        }
    });

    // normalize first, so we can substring later
    root = path.resolve(root);

    // determine the list of items
    var items;
    if (noRecurse) {
        items = fs.readdirSync(root)
            .map(function (name) {
                return path.join(root, name);
            });
    }
    else {
        items = find(root)
            .filter(function (item) { // filter out the root folder
                return path.normalize(item) != root;
            });
    }

    return minimatch.match(items, pattern, mergedOptions);
}
exports.matchFind = matchFind;
|
|
|
|
// Copy every item under sourceRoot matching "pattern" into destRoot,
// preserving each item's relative location. Options are forwarded to matchFind.
var matchCopy = function (pattern, sourceRoot, destRoot, options) {
    assert(pattern, 'pattern');
    assert(sourceRoot, 'sourceRoot');
    assert(destRoot, 'destRoot');

    console.log(`copying ${pattern}`);

    // normalize first, so we can substring later
    sourceRoot = path.resolve(sourceRoot);
    destRoot = path.resolve(destRoot);

    var matchedItems = matchFind(pattern, sourceRoot, options);
    matchedItems.forEach(function (matchedItem) {
        // mirror the item's relative location under destRoot
        var relative = matchedItem.substring(sourceRoot.length + 1);
        assert(relative, 'relative'); // the root itself is always filtered out by matchFind
        var targetDir = path.dirname(path.join(destRoot, relative));
        mkdir('-p', targetDir);

        cp('-Rf', matchedItem, targetDir + '/');
    });
}
exports.matchCopy = matchCopy;
|
|
|
|
// Remove every item under sourceRoot matching "pattern".
// Options are forwarded to matchFind.
var matchRemove = function (pattern, sourceRoot, options) {
    assert(pattern, 'pattern');
    assert(sourceRoot, 'sourceRoot');

    console.log(`removing ${pattern}`);

    var matchedItems = matchFind(pattern, sourceRoot, options);
    matchedItems.forEach(function (matchedItem) {
        rm('-Rf', matchedItem);
    });
}
exports.matchRemove = matchRemove;
|
|
|
|
// Run a command line synchronously via child_process.execSync.
//   cl             - the full command line to execute
//   inheritStreams - when truthy, the child writes directly to this process's
//                    stdio; otherwise output is captured and returned
//   noHeader       - suppress the "> cmd" echo before execution
// Exits the process (code 1) on command failure.
// Returns the trimmed captured output ('' when streams were inherited).
var run = function (cl, inheritStreams, noHeader) {
    if (!noHeader) {
        console.log();
        console.log('> ' + cl);
    }

    var options = {
        stdio: inheritStreams ? 'inherit' : 'pipe'
    };
    var output;
    try {
        output = ncp.execSync(cl, options);
    }
    catch (err) {
        // when streams are inherited the child already wrote to the console
        if (!inheritStreams) {
            console.error(err.output ? err.output.toString() : err.message);
        }

        process.exit(1);
    }

    // output is a Buffer when piped, undefined when inherited.
    // (removed the original's unused local "rc")
    return (output || '').toString().trim();
}
exports.run = run;
|
|
|
|
// Verify a required command-line tool is available on the PATH and,
// optionally, that it reports an acceptable version.
//   versionArgs - args to ask the tool for its version (e.g. '--version')
//   validate    - either the exact expected version string, or a callback
//                 invoked with the trimmed version output
var ensureTool = function (name, versionArgs, validate) {
    console.log(name + ' tool:');
    var toolPath = which(name);
    if (!toolPath) {
        fail(name + ' not found. might need to run npm install');
    }

    if (versionArgs) {
        var versionOutput = exec(name + ' ' + versionArgs).stdout;
        if (typeof validate == 'string') {
            if (versionOutput.trim() != validate) {
                fail('expected version: ' + validate);
            }
        }
        else {
            validate(versionOutput.trim());
        }
    }

    console.log(toolPath + '');
}
exports.ensureTool = ensureTool;
|
|
|
|
// Download a file into the _download cache, keyed by a scrubbed form of its
// URL. A "<target>.completed" marker short-circuits repeat downloads.
// Returns the local file path.
var downloadFileAsync = async function (url) {
    // validate parameters
    if (!url) {
        throw new Error('Parameter "url" must be set.');
    }

    // skip if already downloaded
    const scrubbedUrl = url.replace(/[/\:?]/g, '_');
    const fileDirectory = path.join(downloadPath, 'file');
    const targetPath = path.join(fileDirectory, scrubbedUrl);
    const marker = targetPath + '.completed';
    if (test('-f', marker)) {
        console.log('File already exists: ' + targetPath);
        return targetPath;
    }

    console.log('Downloading file: ' + url);

    // delete any previous partial attempt
    if (test('-f', targetPath)) {
        rm('-f', targetPath);
    }

    // download the file
    mkdir('-p', fileDirectory);

    const downloader = new Downloader({
        url: url,
        directory: fileDirectory,
        fileName: scrubbedUrl
    });

    const { filePath } = await downloader.download(); // Downloader.download() resolves with some useful properties.

    // write the completed marker so the next call can reuse the download
    fs.writeFileSync(marker, '');

    return filePath;
}
exports.downloadFileAsync = downloadFileAsync;
|
|
|
|
// Download an archive and extract it into the _download cache, returning the
// extraction directory. Supports .zip everywhere and .tar.gz on macOS/Linux.
// When omitExtensionCheck is truthy the archive is treated as a zip regardless
// of its URL extension (e.g. PowerShell gallery packages). Results are cached
// via a "<target>.completed" marker file.
var downloadArchiveAsync = async function (url, omitExtensionCheck) {
    // validate parameters
    if (!url) {
        throw new Error('Parameter "url" must be set.');
    }

    var isZip;
    var isTargz;
    if (omitExtensionCheck) {
        isZip = true;
    }
    else {
        if (url.match(/\.zip$/)) {
            isZip = true;
        }
        else if (url.match(/\.tar\.gz$/) && (process.platform == 'darwin' || process.platform == 'linux')) {
            isTargz = true;
        }
        else {
            throw new Error('Unexpected archive extension');
        }
    }

    // skip if already downloaded and extracted
    var scrubbedUrl = url.replace(/[/\:?]/g, '_');
    var targetPath = path.join(downloadPath, 'archive', scrubbedUrl);
    var marker = targetPath + '.completed';
    if (!test('-f', marker)) {
        // download the archive
        var archivePath = await downloadFileAsync(url);
        console.log('Extracting archive: ' + url);

        // delete any previously attempted extraction directory
        if (test('-d', targetPath)) {
            rm('-rf', targetPath);
        }

        // extract
        mkdir('-p', targetPath);
        if (isZip) {
            if (process.platform == 'win32') {
                let escapedFile = archivePath.replace(/'/g, "''").replace(/"|\n|\r/g, ''); // double-up single quotes, remove double quotes and newlines
                let escapedDest = targetPath.replace(/'/g, "''").replace(/"|\n|\r/g, '');

                let command = `$ErrorActionPreference = 'Stop' ; try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ; [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}')`;
                run(`powershell -Command "${command}"`);
            } else {
                // BUGFIX: quote both paths so cache directories containing
                // spaces do not split into multiple shell arguments.
                run(`unzip "${archivePath}" -d "${targetPath}"`);
            }
        }
        else if (isTargz) {
            var originalCwd = process.cwd();
            cd(targetPath);
            try {
                run(`tar -xzf "${archivePath}"`);
            }
            finally {
                cd(originalCwd);
            }
        }

        // write the completed marker
        fs.writeFileSync(marker, '');
    }

    return targetPath;
}
exports.downloadArchiveAsync = downloadArchiveAsync;
|
|
|
|
// Copy one "group" description (from make-options style config) from
// sourceRoot into destRoot.
//
// example structure to copy a single file:
// {
//   "source": "foo.dll"
// }
//
// example structure to copy an array of files/folders to a relative directory:
// {
//   "source": [
//     "foo.dll",
//     "bar",
//   ],
//   "dest": "baz/",
//   "options": "-R"
// }
//
// example to multiply the copy by .NET culture names supported by TFS:
// {
//   "source": "<CULTURE_NAME>/foo.dll",
//   "dest": "<CULTURE_NAME>/"
// }
//
var copyGroup = function (group, sourceRoot, destRoot) {
    // validate parameters
    assert(group, 'group');
    assert(group.source, 'group.source');
    if (typeof group.source == 'object') {
        assert(group.source.length, 'group.source.length');
        group.source.forEach(function (s) {
            assert(s, 'group.source[i]');
        });
    }

    assert(sourceRoot, 'sourceRoot');
    assert(destRoot, 'destRoot');

    // multiply by culture name (recursive call to self): stamp out one
    // localized copy of the group per culture and process each one
    if (group.dest && group.dest.indexOf('<CULTURE_NAME>') >= 0) {
        cultureNames.forEach(function (cultureName) {
            // culture names do not contain any JSON-special characters, so this is OK (albeit a hack)
            var localizedGroupJson = JSON.stringify(group).replace(/<CULTURE_NAME>/g, cultureName);
            copyGroup(JSON.parse(localizedGroupJson), sourceRoot, destRoot);
        });

        return;
    }

    // build the source array (a single string becomes a one-element array)
    var source = typeof group.source == 'string' ? [ group.source ] : group.source;
    source = source.map(function (val) { // root the paths
        return path.join(sourceRoot, val);
    });

    // create the destination directory
    var dest = group.dest ? path.join(destRoot, group.dest) : destRoot + '/';
    dest = path.normalize(dest);
    mkdir('-p', dest);

    // copy the files; when "options" is present use the three-arg cp form
    if (group.hasOwnProperty('options') && group.options) {
        cp(group.options, source, dest);
    }
    else {
        cp(source, dest);
    }
}
|
|
|
|
// Remove one "group" of items, each resolved relative to pathRoot.
//
// example structure to remove an array of files/folders:
// {
//   "items": [
//     "foo.dll",
//     "bar",
//   ],
//   "options": "-R"
// }
var removeGroup = function (group, pathRoot) {
    // validate parameters
    assert(group, 'group');
    assert(group.items, 'group.items');
    if (typeof group.items != 'object') {
        throw new Error('Expected group.items to be an array');
    }

    assert(group.items.length, 'group.items.length');
    group.items.forEach(function (item) {
        assert(item, 'group.items[i]');
    });

    assert(group.options, 'group.options');
    assert(pathRoot, 'pathRoot');

    // root the item paths, then remove them in one shot
    var rootedItems = group.items.map(function (item) {
        return path.join(pathRoot, item);
    });

    rm(group.options, rootedItems);
}
|
|
|
|
// Prepend "directory" to the process PATH environment variable.
var addPath = function (directory) {
    // Windows PATH entries are ';'-separated; everything else uses ':'
    var separator = os.platform() == 'win32' ? ';' : ':';

    var existing = process.env['PATH'];
    process.env['PATH'] = existing ? directory + separator + existing : directory;
}
exports.addPath = addPath;
|
|
//------------------------------------------------------------------------------
// task.json functions
//------------------------------------------------------------------------------

// Generate the en-US resources.resjson file for a task from its task.json
// content, collecting every localizable string under a "loc.*" key.
var createResjson = function (task, taskPath) {
    var resources = {};

    // top-level localizable strings
    ['friendlyName', 'helpMarkDown', 'description', 'instanceNameFormat'].forEach(function (propName) {
        if (task.hasOwnProperty(propName)) {
            resources['loc.' + propName] = task[propName];
        }
    });

    // group display names
    if (task.hasOwnProperty('groups')) {
        task.groups.forEach(function (group) {
            if (group.hasOwnProperty('name')) {
                resources['loc.group.displayName.' + group.name] = group.displayName;
            }
        });
    }

    // input labels and (non-empty) help text
    if (task.hasOwnProperty('inputs')) {
        task.inputs.forEach(function (input) {
            if (input.hasOwnProperty('name')) {
                resources['loc.input.label.' + input.name] = input.label;

                if (input.hasOwnProperty('helpMarkDown') && input.helpMarkDown) {
                    resources['loc.input.help.' + input.name] = input.helpMarkDown;
                }
            }
        });
    }

    // messages
    if (task.hasOwnProperty('messages')) {
        Object.keys(task.messages).forEach(function (key) {
            resources['loc.messages.' + key] = task.messages[key];
        });
    }

    var resjsonPath = path.join(taskPath, 'Strings', 'resources.resjson', 'en-US', 'resources.resjson');
    mkdir('-p', path.dirname(resjsonPath));
    fs.writeFileSync(resjsonPath, JSON.stringify(resources, null, 2));
};
exports.createResjson = createResjson;
|
|
|
|
// Write task.loc.json next to task.json, replacing each localizable string
// with its "ms-resource:loc.*" key (the counterpart of createResjson).
var createTaskLocJson = function (taskPath) {
    var taskJsonPath = path.join(taskPath, 'task.json');
    var taskLoc = JSON.parse(fs.readFileSync(taskJsonPath));

    // top-level strings are always replaced
    taskLoc.friendlyName = 'ms-resource:loc.friendlyName';
    taskLoc.helpMarkDown = 'ms-resource:loc.helpMarkDown';
    taskLoc.description = 'ms-resource:loc.description';
    taskLoc.instanceNameFormat = 'ms-resource:loc.instanceNameFormat';

    // group display names
    if (taskLoc.hasOwnProperty('groups')) {
        taskLoc.groups.forEach(function (group) {
            if (group.hasOwnProperty('name')) {
                group.displayName = 'ms-resource:loc.group.displayName.' + group.name;
            }
        });
    }

    // input labels and (non-empty) help text
    if (taskLoc.hasOwnProperty('inputs')) {
        taskLoc.inputs.forEach(function (input) {
            if (input.hasOwnProperty('name')) {
                input.label = 'ms-resource:loc.input.label.' + input.name;

                if (input.hasOwnProperty('helpMarkDown') && input.helpMarkDown) {
                    input.helpMarkDown = 'ms-resource:loc.input.help.' + input.name;
                }
            }
        });
    }

    // messages
    if (taskLoc.hasOwnProperty('messages')) {
        Object.keys(taskLoc.messages).forEach(function (key) {
            taskLoc.messages[key] = 'ms-resource:loc.messages.' + key;
        });
    }

    fs.writeFileSync(path.join(taskPath, 'task.loc.json'), JSON.stringify(taskLoc, null, 2));
};
exports.createTaskLocJson = createTaskLocJson;
|
|
|
|
// Validates the structure of a task.json file.
// Fails the build (process exit) on the first violated rule:
//   - id: required GUID
//   - name: required alphanumeric string
//   - friendlyName: required, at most 40 characters
//   - instanceNameFormat: required
var validateTask = function (task) {
    // (removed a stray empty statement after this block in the original)
    if (!task.id || !check.isUUID(task.id)) {
        fail('id is a required guid');
    }

    if (!task.name || !check.isAlphanumeric(task.name)) {
        fail('name is a required alphanumeric string');
    }

    if (!task.friendlyName || !check.isLength(task.friendlyName, 1, 40)) {
        fail('friendlyName is a required string <= 40 chars');
    }

    if (!task.instanceNameFormat) {
        fail('instanceNameFormat is required');
    }
};
exports.validateTask = validateTask;
|
|
//------------------------------------------------------------------------------

//------------------------------------------------------------------------------
// package functions
//------------------------------------------------------------------------------

// Mirror per-task build output into a layout folder using links instead of
// copies: directories become junction points (fs.symlinkSync 'junction'),
// files become hard links. The 'Common' folder, loose files at the root, and
// each task's 'Tests' folder are skipped. When metadataOnly is true, only
// task metadata items (task.json, task.loc.json, Strings, icon.png) are linked.
var linkNonAggregatedLayoutContent = function (sourceRoot, destRoot, metadataOnly) {
    assert(sourceRoot, 'sourceRoot');
    assert(destRoot, 'destRoot');
    var metadataFileNames = [ 'TASK.JSON', 'TASK.LOC.JSON', 'STRINGS', 'ICON.PNG' ];

    // process each file/folder within the source root
    fs.readdirSync(sourceRoot).forEach(function (taskName) {
        var taskSourcePath = path.join(sourceRoot, taskName);
        var taskDestPath = path.join(destRoot, taskName);

        // skip the Common folder and skip files
        if (taskName == 'Common' || !fs.statSync(taskSourcePath).isDirectory()) {
            return;
        }

        mkdir('-p', taskDestPath);

        // process each file/folder within each task folder
        fs.readdirSync(taskSourcePath).forEach(function (childName) {
            // skip the Tests folder
            if (childName == 'Tests') {
                return;
            }

            // when metadataOnly=true, skip non-metadata items
            if (metadataOnly && metadataFileNames.indexOf(childName.toUpperCase()) < 0) {
                return;
            }

            // create a junction point for directories, hardlink files
            var itemSourcePath = path.join(taskSourcePath, childName);
            var itemDestPath = path.join(taskDestPath, childName);
            if (fs.statSync(itemSourcePath).isDirectory()) {
                fs.symlinkSync(itemSourcePath, itemDestPath, 'junction');
            }
            else {
                fs.linkSync(itemSourcePath, itemDestPath);
            }
        });
    });
}
|
|
|
|
// Merge one layout into the aggregated layout via junction points.
// Each task directory is linked as "<name>__v<Major>". Callers link newer
// releases FIRST: if a destination for the same name+major already exists,
// the incoming (older) version must be <= the one already linked, otherwise
// the build fails. "release" may be '' (HEAD); "commit" is required and is
// recorded in an empty metadata marker file next to the link.
var linkAggregatedLayoutContent = function (sourceRoot, destRoot, release, commit) {
    assert(sourceRoot, 'sourceRoot');
    assert(destRoot, 'destRoot');
    assert(commit, 'commit');
    console.log();
    console.log(`> Linking ${path.basename(sourceRoot)}`);
    mkdir('-p', destRoot);

    // process each file/folder within the source root
    fs.readdirSync(sourceRoot).forEach(function (itemName) {
        // skip files
        var taskSourcePath = path.join(sourceRoot, itemName);
        if (!fs.statSync(taskSourcePath).isDirectory()) {
            return;
        }

        // load the source task.json
        var sourceTask = JSON.parse(fs.readFileSync(path.join(taskSourcePath, 'task.json')));
        if (typeof sourceTask.version.Major != 'number' ||
            typeof sourceTask.version.Minor != 'number' ||
            typeof sourceTask.version.Patch != 'number') {

            fail(`Expected task.version.Major/Minor/Patch to be numbers (${taskSourcePath})`);
        }

        // determine the dest folder based on the major version
        var taskDestPath = path.join(destRoot, itemName + `__v${sourceTask.version.Major}`);

        if (test('-e', taskDestPath)) {
            // validate that a newer minor+patch does not exist in an older release
            // (newer releases should be linked first)
            var destTask = JSON.parse(fs.readFileSync(path.join(taskDestPath, 'task.json')));
            var sourceVersion = `${sourceTask.version.Major}.${sourceTask.version.Minor}.${sourceTask.version.Patch}`;
            var destVersion = `${destTask.version.Major}.${destTask.version.Minor}.${destTask.version.Patch}`;
            if (semver.gt(sourceVersion, destVersion)) {
                fail(`Expected minor+patch version for task already in the aggregate layout, to be greater or equal than non-aggregated layout being merged. Source task: ${taskSourcePath}`);
            }
        }
        else {
            // create a junction point
            fs.symlinkSync(taskSourcePath, taskDestPath, 'junction');

            // write a human-friendly metadata file
            fs.writeFileSync(taskDestPath + (release ? `_m${release}` : '') + `_${commit}`, '');
        }
    });
}
|
|
|
|
// Retrieve a previously published non-aggregated layout from the local cache
// (%LOCALAPPDATA%\vsts-tasks) and expand it under packagePath.
// Fails when the cached zip (with its .completed marker) is missing.
// Returns the expanded layout directory.
var getNonAggregatedLayout = function (packagePath, release, commit) {
    assert(packagePath, 'packagePath');
    assert(release, 'release');
    assert(commit, 'commit');

    // validate the zip is in the cache
    var localappdata = process.env.LOCALAPPDATA;
    assert(localappdata, 'LOCALAPPDATA');
    var cachedZipPath = path.join(localappdata, 'vsts-tasks', `non-aggregated-tasks_m${release}_${commit}.zip`);
    var cachedMarkerPath = `${cachedZipPath}.completed`;
    if (!test('-f', cachedMarkerPath)) {
        fail(`Non-aggregated layout for m${release} (${commit}) not found in the cache. Publish the latest m${release} and then try again.`);
    }

    // extract
    console.log();
    console.log(`> Expanding ${path.basename(cachedZipPath)}`);
    var layoutPath = path.join(packagePath, `non-aggregated-layout-m${release}`);
    run(`powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "& '${path.join(__dirname, 'Expand-Tasks.ps1')}' -ZipPath '${cachedZipPath}' -TargetPath '${layoutPath}'"`, /*inheritStreams:*/false, /*noHeader*/true);

    return layoutPath;
}
|
|
|
|
// Gather git ref info: the HEAD branch/commit (and release number when HEAD
// is a releases/m<NNN> branch), plus branch/commit for each remote release
// branch in range (m108 up to HEAD's release, when known).
// Returns { head: { branch, commit, release }, releases: { <num>: {...} } }.
var getRefs = function () {
    console.log();
    console.log('> Getting branch/commit info');

    var branch;
    if (process.env.TF_BUILD) {
        // during CI agent checks out a commit, not a branch.
        // $(build.sourceBranch) indicates the branch name, e.g. refs/heads/releases/m108
        branch = process.env.BUILD_SOURCEBRANCH;
    }
    else {
        // assumes user has checked out a branch. this is a fairly safe assumption.
        // this code only runs during "package" and "publish" build targets, which
        // is not typically run locally.
        branch = run('git symbolic-ref HEAD', /*inheritStreams*/false, /*noHeader*/true);
    }

    assert(branch, 'branch');
    var commit = run('git rev-parse --short=8 HEAD', /*inheritStreams*/false, /*noHeader*/true);
    var release;
    if (branch.match(/^refs\/heads\/releases\/m[0-9]+$/)) {
        // "m123" -> 123
        release = parseInt(branch.split('/').pop().substr(1), 10);
    }

    // get the ref info for HEAD
    // (BUGFIX: the original declared "info" twice; one declaration suffices)
    var info = {
        head: {
            branch: branch,
            commit: commit,
            release: release
        },
        releases: { }
    };

    // get the ref info for each release branch within range
    run('git branch --list --remotes "origin/releases/m*"', /*inheritStreams*/false, /*noHeader*/true)
        .split('\n')
        .forEach(function (branch) {
            branch = branch.trim();
            if (!branch.match(/^origin\/releases\/m[0-9]+$/)) {
                return;
            }

            var release = parseInt(branch.split('/').pop().substr(1), 10);

            // filter out releases less than 108 and greater than HEAD
            if (release < 108 ||
                release > (info.head.release || 999)) {

                return;
            }

            branch = 'refs/remotes/' + branch;
            var commit = run(`git rev-parse --short=8 "${branch}"`, /*inheritStreams*/false, /*noHeader*/true);
            info.releases[release] = {
                branch: branch,
                commit: commit,
                release: release
            };
        });

    return info;
}
exports.getRefs = getRefs;
|
|
|
|
// Zip task content via the Compress-Tasks.ps1 helper script (Windows-only:
// requires powershell.exe). When "individually" is truthy each task is zipped
// into its own archive under destPath; otherwise sourceRoot is zipped as a
// single archive at destPath.
var compressTasks = function (sourceRoot, destPath, individually) {
    assert(sourceRoot, 'sourceRoot');
    assert(destPath, 'destPath');
    run(`powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "& '${path.join(__dirname, 'Compress-Tasks.ps1')}' -SourceRoot '${sourceRoot}' -TargetPath '${destPath}' -Individually:${individually ? '$true' : '$false'}"`, /*inheritStreams:*/true, /*noHeader*/true);
}
exports.compressTasks = compressTasks;
|
|
|
|
// Produce the non-aggregated tasks zip from the build output:
// per-task nested zips + linked task metadata, stamped with a layout-version
// marker, then zipped as a whole into packagePath.
var createNonAggregatedZip = function (buildPath, packagePath) {
    assert(buildPath, 'buildPath');
    assert(packagePath, 'packagePath');

    // build the layout for the nested task zips
    console.log();
    console.log('> Linking content for nested task zips');
    var nestedZipsRoot = path.join(packagePath, 'nested-zips-layout');
    linkNonAggregatedLayoutContent(buildPath, nestedZipsRoot, /*metadataOnly*/false);

    // create the nested task zips (part of the non-aggregated tasks layout)
    console.log();
    console.log('> Creating nested task zips (content for non-aggregated tasks layout)');
    var layoutRoot = path.join(packagePath, 'non-aggregated-layout');
    compressTasks(nestedZipsRoot, layoutRoot, /*individually:*/true);

    // link the task metadata into the non-aggregated tasks layout
    console.log();
    console.log('> Linking metadata content for non-aggregated tasks layout');
    linkNonAggregatedLayoutContent(buildPath, layoutRoot, /*metadataOnly*/true);

    // mark the layout with a version number.
    // servicing supports both this new format and the legacy layout format as well.
    fs.writeFileSync(path.join(layoutRoot, 'layout-version.txt'), '2');

    // create the non-aggregated tasks zip
    console.log();
    console.log('> Zipping non-aggregated tasks layout');
    var tasksZipPath = path.join(packagePath, 'non-aggregated-tasks.zip');
    compressTasks(layoutRoot, tasksZipPath);
}
exports.createNonAggregatedZip = createNonAggregatedZip;
|
|
|
|
// Build the aggregated tasks zip: the current build's non-aggregated layout
// merged (newest release first) with each cached prior release layout, then
// validated for GUID+major-version uniqueness and zipped.
var createAggregatedZip = function (packagePath) {
    assert(packagePath, 'packagePath');

    // get branch/commit info
    var refs = getRefs();

    // initialize the aggregated layout
    // mark the layout with a version number.
    // servicing supports both this new format and the legacy layout format as well.
    console.log();
    console.log('> Creating aggregated layout');
    var aggregatedLayoutPath = path.join(packagePath, 'aggregated-layout');
    mkdir('-p', aggregatedLayoutPath);
    fs.writeFileSync(path.join(aggregatedLayoutPath, 'layout-version.txt'), '2');

    // link the tasks from the non-aggregated layout into the aggregated layout
    var nonAggregatedLayoutPath = path.join(packagePath, 'non-aggregated-layout');
    linkAggregatedLayoutContent(nonAggregatedLayoutPath, aggregatedLayoutPath, /*release:*/'', /*commit:*/refs.head.commit);

    // link the tasks from previous releases into the aggregated layout
    // (sorted + reversed so newer releases are linked first)
    Object.keys(refs.releases)
        .sort()
        .reverse()
        .forEach(function (release) {
            // skip the current release (already covered by current build)
            if (release == refs.head.release) {
                return;
            }

            var commit = refs.releases[release].commit;
            var releaseLayout = getNonAggregatedLayout(packagePath, release, commit);
            linkAggregatedLayoutContent(releaseLayout, aggregatedLayoutPath, /*release:*/release, /*commit:*/commit);
        });

    // validate task uniqueness within the layout based on task GUID + major version
    var majorVersions = { };
    fs.readdirSync(aggregatedLayoutPath) // walk each item in the aggregate layout
        .forEach(function (itemName) {
            var itemPath = path.join(aggregatedLayoutPath, itemName);
            if (!fs.statSync(itemPath).isDirectory()) { // skip files
                return;
            }

            // load the task.json
            var taskPath = path.join(itemPath, 'task.json');
            var task = JSON.parse(fs.readFileSync(taskPath));
            if (typeof task.version.Major != 'number') {
                fail(`Expected task.version.Major/Minor/Patch to be a number (${taskPath})`);
            }

            assert(task.id, `task.id (${taskPath})`);
            if (typeof task.id != 'string') {
                fail(`Expected id to be a string (${taskPath})`);
            }

            // validate GUID + Major version is unique
            var key = task.id + task.version.Major;
            if (majorVersions[key]) {
                fail(`Tasks GUID + Major version must be unique within the aggregated layout. Task 1: ${majorVersions[key]}; task 2: ${taskPath}`);
            }

            majorVersions[key] = taskPath;
        });

    // create the aggregated tasks zip
    console.log();
    console.log('> Zipping aggregated tasks layout');
    var aggregatedZipPath = path.join(packagePath, 'pack-source', 'contents', 'Microsoft.TeamFoundation.Build.Tasks.zip');
    mkdir('-p', path.dirname(aggregatedZipPath));
    compressTasks(aggregatedLayoutPath, aggregatedZipPath);
}
exports.createAggregatedZip = createAggregatedZip;
|
|
|
|
/**
 * Copies a non-aggregated tasks zip into the local package cache
 * (%LOCALAPPDATA%\vsts-tasks) and drops a ".completed" marker next to it so
 * consumers can tell the copy finished.
 *
 * @param {string} zipPath - path to an existing non-aggregated tasks zip
 * @param {string} release - release (milestone) number, used in the cached file name
 * @param {string} commit  - commit sha the zip was built from, used in the cached file name
 */
var storeNonAggregatedZip = function (zipPath, release, commit) {
    assert(zipPath, 'zipPath');
    ensureExists(zipPath);
    assert(release, 'release');
    assert(commit, 'commit');

    console.log();
    console.log(`> Storing non-aggregated zip (m${release} ${commit})`);

    // determine the destination dir (requires LOCALAPPDATA, i.e. Windows)
    var localappdata = process.env.LOCALAPPDATA;
    assert(localappdata, 'LOCALAPPDATA');
    var destDir = path.join(localappdata, 'vsts-tasks');
    mkdir('-p', destDir);

    // remove old packages for the same release branch. '-f' so a glob with no
    // matches (e.g. the very first run) does not surface a shell error.
    rm('-f', path.join(destDir, `non-aggregated-tasks_m${release}_*`));

    // copy the zip
    var destZip = path.join(destDir, `non-aggregated-tasks_m${release}_${commit}.zip`);
    cp(zipPath, destZip);

    // write the completed marker file
    var destMarker = `${destZip}.completed`;
    fs.writeFileSync(destMarker, '');
}
|
|
exports.storeNonAggregatedZip = storeNonAggregatedZip;
|
|
|
|
/**
 * Ensures the requested Node.js version is available for running tests,
 * downloading the official distribution from nodejs.org when needed and
 * prepending its bin directory to the PATH.
 *
 * @param {number|string} [nodeVersion] - supported major version (20, 16, 14,
 *     10, 6, 5). Defaults to 6. Any other value fails the build.
 * @throws {Error} on an unsupported platform (only darwin/linux/win32 are handled)
 */
var installNode = async function (nodeVersion) {
    // supported major version -> exact dist version to download
    const versions = {
        20: 'v20.17.0',
        16: 'v16.17.1',
        14: 'v14.10.1',
        10: 'v10.24.1',
        6: 'v6.10.3',
        5: 'v5.10.1',
    };

    if (!nodeVersion) {
        nodeVersion = versions[6];
    } else {
        if (!versions[nodeVersion]) {
            fail(`Unexpected node version '${nodeVersion}'. Supported versions: ${Object.keys(versions).join(', ')}`);
        }
        nodeVersion = versions[nodeVersion];
    }

    // skip the download entirely when the running node already matches
    if (nodeVersion === run('node -v')) {
        console.log('skipping node install for tests since correct version is running');
        return;
    }

    var platform = os.platform();
    var nodeUrl = 'https://nodejs.org/dist';
    switch (platform) {
        case 'darwin':
        case 'linux': {
            // darwin/linux tarballs share the same layout: a versioned
            // top-level folder ("node-<version>-<platform>-x64") with a bin dir
            const folderName = `node-${nodeVersion}-${platform}-x64`;
            const nodeArchivePath = await downloadArchiveAsync(`${nodeUrl}/${nodeVersion}/${folderName}.tar.gz`);
            addPath(path.join(nodeArchivePath, folderName, 'bin'));
            break;
        }
        case 'win32': {
            // on Windows, node.exe/node.lib are downloaded individually and
            // cached under a directory guarded by a ".completed" marker
            const nodeDirectory = path.join(downloadPath, `node-${nodeVersion}`);
            const marker = nodeDirectory + '.completed';
            if (!test('-f', marker)) {
                const nodeExePath = await downloadFileAsync(`${nodeUrl}/${nodeVersion}/win-x64/node.exe`);
                const nodeLibPath = await downloadFileAsync(`${nodeUrl}/${nodeVersion}/win-x64/node.lib`);
                rm('-Rf', nodeDirectory);
                mkdir('-p', nodeDirectory);
                cp(nodeExePath, path.join(nodeDirectory, 'node.exe'));
                cp(nodeLibPath, path.join(nodeDirectory, 'node.lib'));
                fs.writeFileSync(marker, '');
            }
            addPath(nodeDirectory);
            break;
        }
        default:
            throw new Error('Unexpected platform: ' + platform);
    }
}
|
|
exports.installNode = installNode;
|
|
|
|
/**
 * Determines which Node major versions a task declares in its task.json
 * execution (or prejobexecution) handlers, for selecting test runtimes.
 *
 * Falls back to [20] when the task.json is missing, declares no node
 * handlers, or only declares versions at or below MOCHA_NODE_VERSION.
 *
 * @param {string} buildPath - root folder containing built task folders
 * @param {string} taskName  - task folder name under buildPath
 * @returns {number[]} node major versions to test with (never empty)
 */
var getTaskNodeVersion = function(buildPath, taskName) {
    let nodes = [];
    var taskJsonPath = path.join(buildPath, taskName, "task.json");
    if (!fs.existsSync(taskJsonPath)) {
        console.warn('Unable to find task.json, defaulting to use Node 20');
        nodes.push(20);
        return nodes;
    }

    var taskJsonContents = fs.readFileSync(taskJsonPath, { encoding: 'utf-8' });
    var taskJson = JSON.parse(taskJsonContents);
    var execution = taskJson['execution'] || taskJson['prejobexecution'];
    // guard: a task.json with neither section previously crashed on
    // Object.keys(undefined); treat it like the other fallback cases instead
    if (!execution) {
        console.warn('Unable to determine execution type from task.json, defaulting to use Node 20');
        nodes.push(20);
        return nodes;
    }

    for (var key of Object.keys(execution)) {
        const executor = key.toLocaleLowerCase();
        if (!executor.startsWith('node')) continue;

        // "node16" -> 16; a bare "node" handler parses to NaN and means Node 6
        const version = executor.replace('node', '');
        nodes.push(parseInt(version) || 6);
    }

    // only keep versions newer than what mocha itself runs on
    nodes = nodes.filter( version => version > MOCHA_NODE_VERSION);
    if (nodes.length) {
        return nodes;
    }

    console.warn('Unable to determine execution type from task.json, defaulting to use Node 20');
    nodes.push(20);
    return nodes;
}
|
|
exports.getTaskNodeVersion = getTaskNodeVersion;
|
|
|
|
var toOverrideString = function(object) {
|
|
return JSON.stringify(object).replace(/"/g, '\\"');
|
|
}
|
|
|
|
exports.toOverrideString = toOverrideString;
|
|
|
|
/**
 * Packages the extension into a .vsix via tfx-cli, after verifying the
 * expected toolchain versions and pruning test artifacts from the build output.
 *
 * @param {object} manifest - manifest overrides passed to tfx via --override
 */
var createExtension = function (manifest) {
    // verify the expected tool versions are on the PATH before packaging
    ensureTool('tsc', '--version', `Version ${TSC_CURRENT_VERSION}`);
    ensureTool('mocha', '--version', MOCHA_TARGET_VERSION);

    // strip test folders and source maps from the built tasks
    const builtTasksPath = path.join(__dirname, '_build/Tasks/');
    matchRemove('**/Tests', builtTasksPath);
    matchRemove('**/*.js.map', builtTasksPath);

    console.log('Creating vsix...');

    const overrides = toOverrideString(manifest);
    run(`node ./node_modules/tfx-cli/_build/app.js extension create --manifest-globs app-store-vsts-extension.json --override ` + overrides);
}
|
|
|
|
exports.createExtension = createExtension;
|
|
|
|
//------------------------------------------------------------------------------
|