Add unit test coverage (#9)
* Migrate to new ado npm packages
* Add unit tests around no key files or target folders found
* Unit tests for running tasks from fork
* Progress on mocking univ package calls, linting ts
* Unit test around successful cache restore
* Unit test around save cache on cache restored
* Unit tests for savecache on cache hit and mismatched key
* Got mocha tests to debug
* Unit tests passing for if savecache artifact tool err
* Unit test for restore cache miss and permissions errors
* Add vscode dir to repo
* Remove hard typescript dep
* Optionally save test results to file
* Run unit tests in build def
* Don't use gulp to run unit tests
* Use mocha installed to node_modules for testing
* Try gulp again for testing
* Try test without publishing
* Publish test results on failure
* Run mocha directly
* Use mocha from node_modules
* Fix issue with test dep on full filepath
* Isolate test from packaging
* Fix malformed yml
* Ugh. Fix more malformities
* Use tfx-cli from node_modules
* Remove empty build job
* Use extension build task again
* Demand tfx-cli be present
* Try running packaging twice
* Try running build on win
* Avoid mocha path in test script
* Run tests on mac and package on windows
This commit is contained in:
Parent: f3f3a6b7d4
Commit: ea4766905d
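For context, the Mocha launch configuration added below runs node_modules/mocha/bin/_mocha with ts-node registration over Tasks/**/Tests/_suite.ts. A roughly equivalent npm test script would look like the following (a hypothetical sketch for illustration; the package.json used by the build definition is not part of this diff):

{
  "scripts": {
    "test": "mocha --require ts-node/register --timeout 999999 --colors --recursive \"Tasks/**/Tests/_suite.ts\""
  }
}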
@@ -1,6 +1,6 @@
node_modules/
_temp/
.vscode
# .vscode
.DS_Store
gulp-tsc-tmp-*
.gulp-tsc-tmp-*
@@ -0,0 +1,34 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Mocha All",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": [
"--require", "ts-node/register",
"--timeout", "999999",
"--colors", "--recursive",
"${workspaceFolder}/Tasks/**/Tests/_suite.ts"
],
"internalConsoleOptions": "openOnSessionStart"
},
{
"type": "node",
"request": "launch",
"name": "Mocha Current",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"args": [
"--require", "ts-node/register",
"--timeout", "999999",
"--colors", "--recursive",
"${relativeFile}"
],
"internalConsoleOptions": "openOnSessionStart"
}
]
}
@@ -1,12 +1,12 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import * as tl from "vsts-task-lib";
let fs = require("fs");
let os = require("os");
import * as tl from "azure-pipelines-task-lib";
const fs = require("fs");
const os = require("os");
import child = require("child_process");
import stream = require("stream");
import {IExecOptions, IExecSyncResult} from "vsts-task-lib/toolrunner";
import {IExecOptions, IExecSyncResult} from "azure-pipelines-task-lib/toolrunner";

export interface IArtifactToolOptions {
artifactToolPath: string;
@@ -17,21 +17,21 @@ export interface IArtifactToolOptions {
publishedPackageVar: string;
}

export function getOptions(): IExecOptions{
let result: IExecOptions = <IExecOptions>{
export function getOptions(): IExecOptions {
const result: IExecOptions = {
cwd: process.cwd(),
env: Object.assign({}, process.env),
silent: false,
failOnStdErr: false,
ignoreReturnCode: false,
windowsVerbatimArguments: false
};
windowsVerbatimArguments: false,
} as IExecOptions;
result.outStream = process.stdout as stream.Writable;
result.errStream = process.stderr as stream.Writable;
return result;
}

function getCommandString(toolPath: string, command: string[]){
function getCommandString(toolPath: string, command: string[]) {
let cmd: string = toolPath;
command.forEach((a: string): void => {
cmd += ` ${a}`;
@@ -39,19 +39,18 @@ function getCommandString(toolPath: string, command: string[]){
return cmd;
}

export function runArtifactTool(artifactToolPath: string, command: string[], execOptions: IExecOptions): IExecSyncResult{
export function runArtifactTool(artifactToolPath: string, command: string[], execOptions: IExecOptions): IExecSyncResult {

if (tl.osType() === "Windows_NT" || artifactToolPath.trim().toLowerCase().endsWith(".exe")) {
return tl.execSync(artifactToolPath, command, execOptions);
}
else{
} else {
fs.chmodSync(artifactToolPath, "755");

if (!execOptions.silent) {
execOptions.outStream.write(getCommandString(artifactToolPath, command) + os.EOL);
}

let result = child.spawnSync(artifactToolPath, command, execOptions);
const result = child.spawnSync(artifactToolPath, command, execOptions);

if (!execOptions.silent && result.stdout && result.stdout.length > 0) {
execOptions.outStream.write(result.stdout);
@@ -61,7 +60,7 @@ export function runArtifactTool(artifactToolPath: string, command: string[], exe
execOptions.errStream.write(result.stderr);
}

let res: IExecSyncResult = <IExecSyncResult>{ code: result.status, error: result.error };
const res: IExecSyncResult = { code: result.status, error: result.error } as IExecSyncResult;
res.stdout = (result.stdout) ? result.stdout.toString() : null;
res.stderr = (result.stderr) ? result.stderr.toString() : null;
return res;
@@ -6,13 +6,13 @@ import AdmZip = require('adm-zip');
import os = require("os");
import * as path from "path";
import * as semver from "semver";
import * as pkgLocationUtils from "../locationUtilities";
import * as tl from "vsts-task-lib";
import * as toollib from "vsts-task-tool-lib/tool";
import * as pkgLocationUtils from "./locationUtilities";
import * as tl from "azure-pipelines-task-lib";
import * as toollib from "azure-pipelines-tool-lib/tool";

export function getArtifactToolLocation(dirName: string): string {
let toolPath: string = path.join(dirName, "ArtifactTool.exe");
if (tl.osType() !== "Windows_NT"){
if (tl.osType() !== "Windows_NT") {
toolPath = path.join(dirName, "artifacttool");
}
return toolPath;
@@ -31,14 +31,13 @@ export async function extractZip(file: string): Promise<string> {
if (!file) {
throw new Error("parameter 'file' is required");
}
let dest = _createExtractFolder();
let zip = new AdmZip(file);
const dest = _createExtractFolder();
const zip = new AdmZip(file);
zip.extractAllTo(dest, true);
return dest;
}

export async function getArtifactToolFromService(serviceUri: string, accessToken: string, toolName: string){

export async function getArtifactToolFromService(serviceUri: string, accessToken: string, toolName: string) {
const overrideArtifactToolPath = tl.getVariable("UPack.OverrideArtifactToolPath");
if (overrideArtifactToolPath != null) {
return getArtifactToolLocation(overrideArtifactToolPath);
@@ -46,10 +45,10 @@ export async function getArtifactToolFromService(serviceUri: string, accessToken

let osName = tl.osType();
let arch = os.arch();
if(osName === "Windows_NT"){
if (osName === "Windows_NT") {
osName = "windows";
}
if (arch === "x64"){
if (arch === "x64") {
arch = "amd64";
}

@@ -59,13 +58,13 @@ export async function getArtifactToolFromService(serviceUri: string, accessToken

const blobstoreConnection = pkgLocationUtils.getWebApiWithProxy(serviceUri, accessToken);

try{
try {
const artifactToolGetUrl = await blobstoreConnection.vsoClient.getVersioningData(ApiVersion,
blobstoreAreaName, blobstoreAreaId, { toolName }, {osName, arch});

const artifactToolUri = await blobstoreConnection.rest.get(artifactToolGetUrl.requestUrl);

if (artifactToolUri.statusCode !== 200){
if (artifactToolUri.statusCode !== 200) {
tl.debug(tl.loc("Error_UnexpectedErrorFailedToGetToolMetadata", artifactToolUri.toString()));
throw new Error(tl.loc("Error_UnexpectedErrorFailedToGetToolMetadata", artifactToolGetUrl.requestUrl));
}
@@ -80,20 +79,20 @@ export async function getArtifactToolFromService(serviceUri: string, accessToken
const unzippedToolsDir = await extractZip(zippedToolsDir);

artifactToolPath = await toollib.cacheDir(unzippedToolsDir, "ArtifactTool", artifactToolUri.result['version']);
}
else{
} else {
tl.debug(tl.loc("Info_ResolvedToolFromCache", artifactToolPath));
}
return getArtifactToolLocation(artifactToolPath);
}
catch(err){
tl.error(err);
tl.setResult(tl.TaskResult.Failed, tl.loc("FailedToGetArtifactTool", err));
} catch (err) {
tl.warning(err);
// TODO: Should return null?
// tl.setResult(tl.TaskResult.Failed, tl.loc("FailedToGetArtifactTool", err));
return null;
}
}

export function getVersionUtility(versionRadio: string, highestVersion: string): string {
switch(versionRadio) {
switch (versionRadio) {
case "patch":
return semver.inc(highestVersion, "patch");
case "minor":
@ -114,24 +113,23 @@ export async function getPackageNameFromId(serviceUri: string, accessToken: stri
|
|||
|
||||
// Getting url for feeds version API
|
||||
const packageUrl = await new Promise<string>((resolve, reject) => {
|
||||
let getVersioningDataPromise = feedConnection.vsoClient.getVersioningData(ApiVersion, PackagingAreaName, PackageAreaId, { feedId, packageId });
|
||||
getVersioningDataPromise.then((result) => {
|
||||
const getVersioningDataPromise = feedConnection.vsoClient.getVersioningData(ApiVersion, PackagingAreaName, PackageAreaId, { feedId, packageId });
|
||||
getVersioningDataPromise.then(result => {
|
||||
return resolve(result.requestUrl);
|
||||
});
|
||||
getVersioningDataPromise.catch((error) => {
|
||||
getVersioningDataPromise.catch(error => {
|
||||
return reject(error);
|
||||
});
|
||||
});
|
||||
|
||||
// Return the user input incase of failure
|
||||
try{
|
||||
try {
|
||||
const response = await feedConnection.rest.get(packageUrl);
|
||||
if(response.statusCode === 200 && response.result['name']){
|
||||
if (response.statusCode === 200 && response.result['name']) {
|
||||
return response.result['name'];
|
||||
}
|
||||
return packageId;
|
||||
}
|
||||
catch(err){
|
||||
} catch (err) {
|
||||
return packageId;
|
||||
}
|
||||
}
|
||||
|
@ -145,30 +143,29 @@ export async function getHighestPackageVersionFromFeed(serviceUri: string, acces
|
|||
|
||||
// Getting url for feeds version API
|
||||
const packageUrl = await new Promise<string>((resolve, reject) => {
|
||||
var getVersioningDataPromise = feedConnection.vsoClient.getVersioningData(ApiVersion, PackagingAreaName, PackageAreaId, { feedId }, {packageNameQuery: packageName, protocolType: "upack", includeDeleted: "true", includeUrls: "false"});
|
||||
getVersioningDataPromise.then((result) => {
|
||||
const getVersioningDataPromise = feedConnection.vsoClient.getVersioningData(ApiVersion, PackagingAreaName, PackageAreaId, { feedId }, {packageNameQuery: packageName, protocolType: "upack", includeDeleted: "true", includeUrls: "false"});
|
||||
getVersioningDataPromise.then(result => {
|
||||
return resolve(result.requestUrl);
|
||||
});
|
||||
getVersioningDataPromise.catch((error) => {
|
||||
getVersioningDataPromise.catch(error => {
|
||||
return reject(error);
|
||||
});
|
||||
});
|
||||
|
||||
const versionResponse = await new Promise<string>((resolve, reject) => {
|
||||
let responsePromise = feedConnection.rest.get(packageUrl);
|
||||
responsePromise.then((result) => {
|
||||
if (result.result['count'] === 0){
|
||||
const responsePromise = feedConnection.rest.get(packageUrl);
|
||||
responsePromise.then(result => {
|
||||
if (result.result['count'] === 0) {
|
||||
return resolve("0.0.0");
|
||||
}
|
||||
else{
|
||||
result.result['value'].forEach((element) => {
|
||||
if (element.name === packageName.toLowerCase()){
|
||||
} else {
|
||||
result.result['value'].forEach(element => {
|
||||
if (element.name === packageName.toLowerCase()) {
|
||||
return resolve(element.versions[0].version);
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
responsePromise.catch((error) => {
|
||||
responsePromise.catch(error => {
|
||||
return reject(error);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,51 @@
|
|||
import tmrm = require("azure-pipelines-task-lib/mock-run");
|
||||
import { TaskLibAnswerExecResult} from "azure-pipelines-task-lib/mock-answer";
|
||||
|
||||
export function registerArtifactToolUtilitiesMock(
|
||||
tmr: tmrm.TaskMockRunner,
|
||||
toolPath: string
|
||||
) {
|
||||
const artifactToolMocks = {
|
||||
getArtifactToolFromService(serviceUri, accessToken, toolName) {
|
||||
return toolPath;
|
||||
},
|
||||
getPackageNameFromId(
|
||||
serviceUri: string,
|
||||
accessToken: string,
|
||||
feedId: string,
|
||||
packageId: string
|
||||
) {
|
||||
return packageId;
|
||||
},
|
||||
};
|
||||
tmr.registerMock("packaging-common/ArtifactToolUtilities", artifactToolMocks);
|
||||
tmr.registerMock("../ArtifactToolUtilities", artifactToolMocks);
|
||||
}
|
||||
|
||||
export function registerArtifactToolRunnerMock(tmr: tmrm.TaskMockRunner) {
|
||||
const mtt = require("azure-pipelines-task-lib/mock-toolrunner");
|
||||
const artifactToolMocks = {
|
||||
getOptions() {
|
||||
return {
|
||||
cwd: process.cwd(),
|
||||
env: Object.assign({}, process.env),
|
||||
silent: false,
|
||||
failOnStdErr: false,
|
||||
ignoreReturnCode: false,
|
||||
windowsVerbatimArguments: false,
|
||||
};
|
||||
},
|
||||
runArtifactTool(
|
||||
artifactToolPath: string,
|
||||
command: string[],
|
||||
execOptions
|
||||
) {
|
||||
const tr = new mtt.ToolRunner(artifactToolPath);
|
||||
tr.arg(command);
|
||||
return tr.execSync(execOptions);
|
||||
},
|
||||
};
|
||||
|
||||
tmr.registerMock("packaging-common/ArtifactToolRunner", artifactToolMocks);
|
||||
tmr.registerMock("../ArtifactToolRunner", artifactToolMocks);
|
||||
}
|
|
@ -0,0 +1,62 @@
|
|||
import tmrm = require("azure-pipelines-task-lib/mock-run");
|
||||
|
||||
export function registerLocationHelpersMock(tmr: tmrm.TaskMockRunner) {
|
||||
const mockLocationUtils = {
|
||||
getFeedUriFromBaseServiceUri(
|
||||
serviceUri: string,
|
||||
accesstoken: string
|
||||
) {
|
||||
return serviceUri + "/feed";
|
||||
},
|
||||
getBlobstoreUriFromBaseServiceUri(
|
||||
serviceUri: string,
|
||||
accesstoken: string
|
||||
) {
|
||||
return serviceUri + "/blobstore";
|
||||
},
|
||||
getPackagingUris(input) {
|
||||
const collectionUrl: string = "https://vsts/packagesource";
|
||||
return {
|
||||
PackagingUris: [collectionUrl],
|
||||
DefaultPackagingUri: collectionUrl,
|
||||
};
|
||||
},
|
||||
getWebApiWithProxy(serviceUri: string, accessToken?: string) {
|
||||
return {
|
||||
vsoClient: {
|
||||
async getVersioningData(
|
||||
ApiVersion: string,
|
||||
PackagingAreaName: string,
|
||||
PackageAreaId: string,
|
||||
Obj
|
||||
) {
|
||||
return { requestUrl: "foobar" };
|
||||
},
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
getSystemAccessToken() {
|
||||
return "token";
|
||||
},
|
||||
|
||||
getFeedRegistryUrl(
|
||||
packagingUrl: string,
|
||||
registryType,
|
||||
feedId: string,
|
||||
accessToken?: string
|
||||
) {
|
||||
return packagingUrl + "/" + feedId;
|
||||
},
|
||||
ProtocolType: { NuGet: 1, Npm: 2, Maven: 3, PyPi: 4 },
|
||||
RegistryType: {
|
||||
npm: 1,
|
||||
NuGetV2: 2,
|
||||
NuGetV3: 3,
|
||||
PyPiSimple: 4,
|
||||
PyPiUpload: 5,
|
||||
},
|
||||
};
|
||||
tmr.registerMock('packaging-common/locationUtilities', mockLocationUtils);
|
||||
tmr.registerMock("../locationUtilities", mockLocationUtils);
|
||||
}
|
|
@ -0,0 +1,59 @@
|
|||
import {
|
||||
TaskLibAnswers,
|
||||
TaskLibAnswerExecResult
|
||||
} from "azure-pipelines-task-lib/mock-answer";
|
||||
import tmrm = require("azure-pipelines-task-lib/mock-run");
|
||||
import * as pkgMock from "./MockHelper";
|
||||
import * as artMock from "./ArtifactToolMockHelper";
|
||||
|
||||
export class UniversalMockHelper {
|
||||
constructor(
|
||||
private tmr: tmrm.TaskMockRunner,
|
||||
private answers: TaskLibAnswers,
|
||||
private artifactToolCmd: string
|
||||
) {
|
||||
this.tmr.setInput("verbosity", "verbose");
|
||||
this.tmr.setInput('feedlist', 'node-package-feed');
|
||||
|
||||
process.env.AGENT_HOMEDIRECTORY = "/users/home/directory";
|
||||
(process.env.BUILD_SOURCESDIRECTORY = "/users/home/sources"),
|
||||
process.env.SYSTEM_SERVERTYPE = "hosted";
|
||||
process.env.BUILD_DEFINITIONNAME = "build definition 1";
|
||||
(process.env.ENDPOINT_AUTH_SYSTEMVSSCONNECTION =
|
||||
'{"parameters":{"AccessToken":"token"},"scheme":"OAuth"}');
|
||||
process.env.ENDPOINT_URL_SYSTEMVSSCONNECTION =
|
||||
"https://example.visualstudio.com/defaultcollection";
|
||||
process.env.SYSTEM_DEFAULTWORKINGDIRECTORY = "/users/home/directory";
|
||||
process.env.SYSTEM_TEAMFOUNDATIONCOLLECTIONURI =
|
||||
"https://example.visualstudio.com/defaultcollection";
|
||||
|
||||
artMock.registerArtifactToolUtilitiesMock(
|
||||
tmr,
|
||||
this.artifactToolCmd
|
||||
);
|
||||
artMock.registerArtifactToolRunnerMock(this.tmr);
|
||||
pkgMock.registerLocationHelpersMock(tmr);
|
||||
}
|
||||
|
||||
public mockUniversalCommand(
|
||||
command: string,
|
||||
feed: string,
|
||||
packageName: string,
|
||||
packageVersion: string,
|
||||
path: string,
|
||||
result: TaskLibAnswerExecResult,
|
||||
service?: string
|
||||
) {
|
||||
if (!service) {
|
||||
service = "https://example.visualstudio.com/defaultcollection";
|
||||
}
|
||||
console.log(`${
|
||||
this.artifactToolCmd
|
||||
} universal ${command} --feed ${feed} --service ${service} --package-name ${packageName} --package-version ${packageVersion} --path ${path} --patvar UNIVERSAL_${command.toUpperCase()}_PAT --verbosity verbose`);
|
||||
this.answers.exec[
|
||||
`${
|
||||
this.artifactToolCmd
|
||||
} universal ${command} --feed ${feed} --service ${service} --package-name ${packageName} --package-version ${packageVersion} --path ${path} --patvar UNIVERSAL_${command.toUpperCase()}_PAT --verbosity verbose`
|
||||
] = result;
|
||||
}
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as tl from "vsts-task-lib";
|
||||
import * as tl from "azure-pipelines-task-lib";
|
||||
|
||||
export interface IPackageSource {
|
||||
accountUrl: string;
|
||||
|
|
|
@ -1,130 +1,155 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import crypto = require('crypto');
|
||||
import fs = require('fs');
|
||||
import path = require('path');
|
||||
import shell = require('shelljs');
|
||||
import tl = require('vsts-task-lib/task');
|
||||
import crypto = require("crypto");
|
||||
import fs = require("fs");
|
||||
import path = require("path");
|
||||
import shell = require("shelljs");
|
||||
import tl = require("azure-pipelines-task-lib/task");
|
||||
|
||||
import { UniversalPackages } from './universalPackages';
|
||||
import { UniversalPackages } from "./universalPackages";
|
||||
const universalPackages = new UniversalPackages();
|
||||
|
||||
const isWin = process.platform === 'win32';
|
||||
const isWin = process.platform === "win32";
|
||||
const salt = 2;
|
||||
|
||||
export class cacheUtilities {
|
||||
hashFiles = function (files: string[]): string {
|
||||
public hashFiles = function(files: string[]): string {
|
||||
let contents: string = "";
|
||||
files = files.sort();
|
||||
|
||||
files.forEach(file => {
|
||||
let filePath = path.resolve(file);
|
||||
contents += fs.readFileSync(filePath, 'utf8');
|
||||
const filePath = tl.resolve(file);
|
||||
contents += fs.readFileSync(filePath, "utf8");
|
||||
});
|
||||
|
||||
contents = contents.replace(/(\r|\n)/gm, "");
|
||||
contents += salt.toString();
|
||||
|
||||
let hash = `${process.platform}-${crypto.createHash('sha256').update(contents).digest('hex')}`;
|
||||
const hash = `${process.platform}-${crypto
|
||||
.createHash("sha256")
|
||||
.update(contents)
|
||||
.digest("hex")}`;
|
||||
return hash;
|
||||
}
|
||||
};
|
||||
|
||||
downloadCaches = async function (files: string[], destinationFolder: string) {
|
||||
public downloadCaches = async function(files: string[], destinationFolder: string) {
|
||||
const hash: string = await this.hashFiles(files);
|
||||
|
||||
// Make our working folder
|
||||
let tmp_cache = path.join(tl.getVariable('System.DefaultWorkingDirectory') || process.cwd(), 'tmp_cache');
|
||||
let tarballPath = path.join(tmp_cache, hash + '.tar.gz');
|
||||
const tmp_cache = path.join(
|
||||
tl.getVariable("System.DefaultWorkingDirectory") || process.cwd(),
|
||||
"tmp_cache"
|
||||
);
|
||||
let tarballPath = path.join(tmp_cache, hash + ".tar.gz");
|
||||
tl.mkdirP(tmp_cache);
|
||||
|
||||
// Convert to unix path
|
||||
if (isWin) {
|
||||
destinationFolder = '/' + destinationFolder.replace(":", "").replace(/\\/g, '/');
|
||||
tarballPath = '/' + tarballPath.replace(":", "").replace(/\\/g, '/');
|
||||
destinationFolder =
|
||||
"/" + destinationFolder.replace(":", "").replace(/\\/g, "/");
|
||||
tarballPath = "/" + tarballPath.replace(":", "").replace(/\\/g, "/");
|
||||
}
|
||||
|
||||
try {
|
||||
let result = await universalPackages.download(hash, tmp_cache);
|
||||
const result = await universalPackages.download(hash, tmp_cache);
|
||||
|
||||
// Check if blob exists
|
||||
if (result) {
|
||||
if (!result.toolRan) {
|
||||
tl.warning("Issue running universal packages tools");
|
||||
} else if (result.success) {
|
||||
try {
|
||||
shell.exec(`tar -xzf ${tarballPath} -C "${destinationFolder}"`);
|
||||
|
||||
// Set variable to track whether or not we downloaded cache (i.e. it already existed)
|
||||
tl.setVariable(hash, 'true');
|
||||
tl.setVariable('CacheRestored', 'true');
|
||||
tl.setVariable(hash, "true");
|
||||
tl.setVariable("CacheRestored", "true");
|
||||
return;
|
||||
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
}
|
||||
} else {
|
||||
console.log('Cache miss: ', hash);
|
||||
tl.setVariable('CacheRestored', 'false');
|
||||
tl.setVariable(hash, 'false');
|
||||
console.log("Cache miss: ", hash);
|
||||
tl.setVariable("CacheRestored", "false");
|
||||
tl.setVariable(hash, "false");
|
||||
}
|
||||
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
}
|
||||
|
||||
tl.rmRF(tmp_cache);
|
||||
}
|
||||
};
|
||||
|
||||
uploadCaches = async function (keyFiles: string[], targetFolders: string[]) {
|
||||
if (targetFolders.length == 0) {
|
||||
console.log('Issue: no artifacts specified to cache');
|
||||
public uploadCaches = async function(keyFiles: string[], targetFolders: string[]) {
|
||||
if (targetFolders.length === 0) {
|
||||
console.log("Issue: no artifacts specified to cache");
|
||||
return;
|
||||
}
|
||||
|
||||
const hash: string = await this.hashFiles(keyFiles);
|
||||
|
||||
// If we downloaded from a cached archive, no need to regenerate archive
|
||||
let status = tl.getVariable(hash);
|
||||
if (status === 'true') {
|
||||
console.log('Cache entry already exists for: ', hash);
|
||||
const status = tl.getVariable(hash);
|
||||
// const status = process.env[hash];
|
||||
if (status === "true") {
|
||||
console.log("Cache entry already exists for: ", hash);
|
||||
return;
|
||||
}
|
||||
// If hash was not around during the restorecache step, we assume it was produced during build
|
||||
if (status === undefined) {
|
||||
console.log('Not caching artifact produced during build: ', hash);
|
||||
tl.setResult(tl.TaskResult.Skipped, `Not caching artifact produced during build: ${hash}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Creating cache entry for: ', hash);
|
||||
console.log("Creating cache entry for: ", hash);
|
||||
|
||||
// Make our working folder
|
||||
let tmp_cache = path.join(tl.getVariable('System.DefaultWorkingDirectory') || process.cwd(), 'tmp_cache');
|
||||
const tmp_cache = path.join(
|
||||
tl.getVariable("System.DefaultWorkingDirectory") || process.cwd(),
|
||||
"tmp_cache"
|
||||
);
|
||||
tl.mkdirP(tmp_cache);
|
||||
|
||||
// Create tar archive
|
||||
let tarballParentDir = path.join(tmp_cache, '..');
|
||||
let tarballPath = path.join(tmp_cache, hash + '.tar.gz');
|
||||
let tarballParentDir = path.join(tmp_cache, "..");
|
||||
let tarballPath = path.join(tmp_cache, hash + ".tar.gz");
|
||||
|
||||
// ensure exists
|
||||
if (!fs.existsSync(tmp_cache)) {
|
||||
console.log('Artifact directory does not exist: ', tmp_cache)
|
||||
if (!tl.exist(tmp_cache)) {
|
||||
console.log("Artifact directory does not exist: ", tmp_cache);
|
||||
return;
|
||||
}
|
||||
|
||||
if (isWin) {
|
||||
tarballParentDir = '/' + tarballParentDir.replace(":", "").replace(/\\/g, "/");
|
||||
tarballPath = '/' + tarballPath.replace(":", "").replace(/\\/g, "/");
|
||||
tarballParentDir =
|
||||
"/" + tarballParentDir.replace(":", "").replace(/\\/g, "/");
|
||||
tarballPath = "/" + tarballPath.replace(":", "").replace(/\\/g, "/");
|
||||
targetFolders = targetFolders.map(folder => folder.replace(/\\/g, "/"));
|
||||
}
|
||||
|
||||
try {
|
||||
let { stderr: error } = shell.exec(`tar -C "${tarballParentDir}" -czf "${tarballPath}" ${targetFolders.map(t => `\"${t}\"`).join(' ')}`, {silent: true});
|
||||
|
||||
const { stderr: error } = shell.exec(
|
||||
`tar -C "${tarballParentDir}" -czf "${tarballPath}" ${targetFolders
|
||||
.map(t => `\"${t}\"`)
|
||||
.join(" ")}`,
|
||||
{ silent: true }
|
||||
);
|
||||
|
||||
if (error) {
|
||||
console.log(`Issue creating tarball:\n ${error}`);
|
||||
} else {
|
||||
console.log(`Tarball created:\n ${tarballPath}`);
|
||||
|
||||
|
||||
// Upload universal package
|
||||
await universalPackages.publish(hash, tmp_cache);
|
||||
const result = await universalPackages.publish(hash, tmp_cache);
|
||||
|
||||
if (!result.toolRan) {
|
||||
tl.warning("Issue running universal packages tools");
|
||||
} else if (result.success) {
|
||||
console.log("Cache successfully saved");
|
||||
} else {
|
||||
tl.warning("Cache unsuccessfully saved. Find more information in logs above");
|
||||
}
|
||||
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
|
@ -132,91 +157,137 @@ export class cacheUtilities {
|
|||
|
||||
// Delete tmp directory
|
||||
tl.rmRF(tmp_cache);
|
||||
}
|
||||
};
|
||||
|
||||
restoreCache = async function () {
|
||||
public restoreCache = async function() {
|
||||
try {
|
||||
let buildStatus = tl.getVariable('Agent.JobStatus');
|
||||
let buildStatus = tl.getVariable("Agent.JobStatus");
|
||||
if (buildStatus) {
|
||||
buildStatus = buildStatus.toLowerCase();
|
||||
if (buildStatus != 'succeeded' && buildStatus != 'succeededwithissues') {
|
||||
tl.debug('Bailing out from building artifacts due to previously unsuccessful task');
|
||||
if (
|
||||
buildStatus !== "succeeded" &&
|
||||
buildStatus !== "succeededwithissues"
|
||||
) {
|
||||
tl.debug(
|
||||
"Bailing out from building artifacts due to previously unsuccessful task"
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let isFork = tl.getVariable('System.PullRequest.IsFork') || 'undefined';
|
||||
const isFork = tl.getVariable("System.PullRequest.IsFork") || "undefined";
|
||||
|
||||
if (isFork.toLowerCase() == 'true') {
|
||||
console.log('Caches are not restored for forked repositories');
|
||||
if (isFork.toLowerCase() === "true") {
|
||||
tl.setResult(
|
||||
tl.TaskResult.Skipped,
|
||||
"Caches are not restored for forked repositories."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const patterns = tl.getInput('keyFile', true).split(/,[ ]*/g);
|
||||
const patterns = tl.getInput("keyFile", true).split(/,[ ]*/g);
|
||||
|
||||
const findOptions = <tl.FindOptions>{
|
||||
const findOptions = {
|
||||
allowBrokenSymbolicLinks: false,
|
||||
followSpecifiedSymbolicLink: false,
|
||||
followSymbolicLinks: false
|
||||
};
|
||||
followSymbolicLinks: false,
|
||||
} as tl.FindOptions;
|
||||
|
||||
let files: string[] = tl.findMatch(tl.getVariable('System.DefaultWorkingDirectory'), patterns, findOptions);
|
||||
files.forEach(f => { tl.debug(`Found key file: ${f}`) });
|
||||
const files: string[] = tl.findMatch(
|
||||
tl.getVariable("System.DefaultWorkingDirectory"),
|
||||
patterns,
|
||||
findOptions
|
||||
);
|
||||
files.forEach(f => {
|
||||
tl.debug(`Found key file: ${f}`);
|
||||
});
|
||||
|
||||
await this.downloadCaches(files, tl.getVariable('System.DefaultWorkingDirectory') || process.cwd());
|
||||
}
|
||||
catch (err) {
|
||||
if (files.length === 0) {
|
||||
tl.warning(`no key files matching: ${patterns}`);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.downloadCaches(
|
||||
files,
|
||||
tl.getVariable("System.DefaultWorkingDirectory") || process.cwd()
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(`error: ${err}`);
|
||||
tl.setResult(tl.TaskResult.Failed, err.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
saveCache = async function () {
|
||||
public saveCache = async function() {
|
||||
try {
|
||||
let buildStatus = tl.getVariable('Agent.JobStatus');
|
||||
let buildStatus = tl.getVariable("Agent.JobStatus");
|
||||
|
||||
if (buildStatus) {
|
||||
buildStatus = buildStatus.toLowerCase();
|
||||
if (buildStatus != 'succeeded' && buildStatus != 'succeededwithissues') {
|
||||
console.log('Bailing out from building artifacts due to previously unsuccessful task');
|
||||
if (
|
||||
buildStatus !== "succeeded" &&
|
||||
buildStatus !== "succeededwithissues"
|
||||
) {
|
||||
console.log(
|
||||
"Bailing out from building artifacts due to previously unsuccessful task"
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let isFork = tl.getVariable('System.PullRequest.IsFork') || 'undefined';
|
||||
const isFork = tl.getVariable("System.PullRequest.IsFork") || "undefined";
|
||||
|
||||
if (isFork.toLowerCase() == 'true') {
|
||||
console.log('Caches are not saved from forked repositories');
|
||||
if (isFork.toLowerCase() === "true") {
|
||||
tl.setResult(
|
||||
tl.TaskResult.Skipped,
|
||||
"Caches are not saved from forked repositories."
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
const patterns = tl.getInput('keyfile', true).split(/,[ ]*/g);
|
||||
const targetPatterns = tl.getInput('targetfolder', true).split(/,[ ]*/g);
|
||||
const patterns = tl.getInput("keyfile", true).split(/,[ ]*/g);
|
||||
const targetPatterns = tl.getInput("targetfolder", true).split(/,[ ]*/g);
|
||||
|
||||
const findOptions = <tl.FindOptions>{
|
||||
const findOptions = {
|
||||
allowBrokenSymbolicLinks: false,
|
||||
followSpecifiedSymbolicLink: false,
|
||||
followSymbolicLinks: false
|
||||
};
|
||||
followSymbolicLinks: false,
|
||||
} as tl.FindOptions;
|
||||
|
||||
let keyFiles: string[] = tl.findMatch(tl.getVariable('System.DefaultWorkingDirectory'), patterns, findOptions);
|
||||
keyFiles.forEach(f => { tl.debug(`Found key file: ${f}`) });
|
||||
const keyFiles: string[] = tl.findMatch(
|
||||
tl.getVariable("System.DefaultWorkingDirectory"),
|
||||
patterns,
|
||||
findOptions
|
||||
);
|
||||
keyFiles.forEach(f => {
|
||||
tl.debug(`Found key file: ${f}`);
|
||||
});
|
||||
|
||||
if (keyFiles.length === 0) {
|
||||
tl.warning(`no key files matching: ${patterns}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Construct this list of artifacts to store. These are relative to prevent the full path from
|
||||
let searchDirectory = tl.getVariable('System.DefaultWorkingDirectory') || process.cwd();
|
||||
let allPaths = tl.find(searchDirectory);
|
||||
let matchedPaths: string[] = tl.match(allPaths, targetPatterns);
|
||||
let targetFolders: string[] = matchedPaths
|
||||
const searchDirectory =
|
||||
tl.getVariable("System.DefaultWorkingDirectory") || process.cwd();
|
||||
const allPaths = tl.find(searchDirectory);
|
||||
const matchedPaths: string[] = tl.match(allPaths, targetPatterns);
|
||||
const targetFolders: string[] = matchedPaths
|
||||
.filter((itemPath: string) => tl.stats(itemPath).isDirectory())
|
||||
.map(folder => path.relative(searchDirectory, folder));
|
||||
|
||||
tl.debug('\n\n\n-----------------------------');
|
||||
if (targetFolders.length === 0) {
|
||||
tl.warning(`no target folders matching: ${targetPatterns}`);
|
||||
return;
|
||||
}
|
||||
|
||||
tl.debug("\n\n\n-----------------------------");
|
||||
targetFolders.forEach(f => tl.debug(f));
|
||||
tl.debug('-----------------------------\n\n\n');
|
||||
tl.debug("-----------------------------\n\n\n");
|
||||
|
||||
await this.uploadCaches(keyFiles, targetFolders);
|
||||
}
|
||||
catch (err) {
|
||||
} catch (err) {
|
||||
tl.setResult(tl.TaskResult.Failed, err.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,50 +1,87 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as path from "path";
|
||||
import * as tl from "azure-pipelines-task-lib";
|
||||
import * as pkgLocationUtils from "../locationUtilities";
|
||||
import * as tl from "vsts-task-lib";
|
||||
import * as artifactToolUtilities from "./ArtifactToolUtilities";
|
||||
import * as artifactToolUtilities from "../ArtifactToolUtilities";
|
||||
import * as universalDownload from "./universaldownload";
|
||||
import * as universalPublish from "./universalpublish";
|
||||
|
||||
export class UniversalPackages {
|
||||
private artifactToolPath: string;
|
||||
|
||||
private init = async function main(command: string, hash: string, targetFolder: string) : Promise<void> {
|
||||
// Getting artifact tool
|
||||
tl.debug("Getting artifact tool");
|
||||
|
||||
try {
|
||||
const localAccessToken = pkgLocationUtils.getSystemAccessToken();
|
||||
const serviceUri = tl.getEndpointUrl("SYSTEMVSSCONNECTION", false);
|
||||
const blobUri = await pkgLocationUtils.getBlobstoreUriFromBaseServiceUri(
|
||||
serviceUri,
|
||||
localAccessToken);
|
||||
|
||||
// Finding the artifact tool directory
|
||||
this.artifactToolPath = await artifactToolUtilities.getArtifactToolFromService(
|
||||
blobUri,
|
||||
localAccessToken,
|
||||
"artifacttool");
|
||||
}
|
||||
catch (error) {
|
||||
console.log(error);
|
||||
}
|
||||
}
|
||||
|
||||
download = async function (hash: string, targetFolder: string) : Promise<boolean> {
|
||||
if (!this.artifactToolPath) {
|
||||
await this.init();
|
||||
}
|
||||
return universalDownload.run(this.artifactToolPath, hash, targetFolder);
|
||||
}
|
||||
|
||||
publish = async function(hash: string, targetFolder: string) : Promise<boolean> {
|
||||
if (!this.artifactToolPath) {
|
||||
await this.init();
|
||||
}
|
||||
return universalPublish.run(this.artifactToolPath, hash, targetFolder);
|
||||
}
|
||||
export interface UniversalPackagesResult {
|
||||
toolRan: boolean;
|
||||
success: boolean;
|
||||
}
|
||||
|
||||
export class UniversalPackages {
|
||||
private artifactToolPath: string;
|
||||
|
||||
public download = async function(
|
||||
hash: string,
|
||||
targetFolder: string
|
||||
): Promise<UniversalPackagesResult> {
|
||||
if (!this.artifactToolPath) {
|
||||
const initialized = await this.init();
|
||||
if (!initialized) {
|
||||
console.log("Error initializing artifact tool utility");
|
||||
return {
|
||||
toolRan: false,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
return universalDownload.run(this.artifactToolPath, hash, targetFolder);
|
||||
};
|
||||
|
||||
public publish = async function(
|
||||
hash: string,
|
||||
targetFolder: string
|
||||
): Promise<UniversalPackagesResult> {
|
||||
if (!this.artifactToolPath) {
|
||||
const initialized = await this.init();
|
||||
if (!initialized) {
|
||||
console.log("Error initializing artifact tool utility");
|
||||
return {
|
||||
toolRan: false,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
return universalPublish.run(this.artifactToolPath, hash, targetFolder);
|
||||
};
|
||||
|
||||
private init = async function main(
|
||||
command: string,
|
||||
hash: string,
|
||||
targetFolder: string
|
||||
): Promise<boolean> {
|
||||
// Getting artifact tool
|
||||
tl.debug("Getting artifact tool");
|
||||
|
||||
try {
|
||||
const localAccessToken = pkgLocationUtils.getSystemAccessToken();
|
||||
const serviceUri = tl.getEndpointUrl("SYSTEMVSSCONNECTION", false);
|
||||
const blobUri = await pkgLocationUtils.getBlobstoreUriFromBaseServiceUri(
|
||||
serviceUri,
|
||||
localAccessToken
|
||||
);
|
||||
console.log(blobUri);
|
||||
|
||||
// Finding the artifact tool directory
|
||||
this.artifactToolPath = await artifactToolUtilities.getArtifactToolFromService(
|
||||
blobUri,
|
||||
localAccessToken,
|
||||
"artifacttool"
|
||||
);
|
||||
|
||||
if (!this.artifactToolPath) {
|
||||
return false;
|
||||
}
|
||||
|
||||
console.log(this.artifactToolPath);
|
||||
} catch (error) {
|
||||
console.log(error);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
}
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as tl from "vsts-task-lib";
|
||||
import * as tl from "azure-pipelines-task-lib";
|
||||
import * as pkgLocationUtils from "../locationUtilities";
|
||||
import { IExecSyncResult, IExecOptions } from "vsts-task-lib/toolrunner";
|
||||
import * as artifactToolRunner from "./ArtifactToolRunner";
|
||||
import * as artifactToolUtilities from "./ArtifactToolUtilities";
|
||||
import { IExecSyncResult, IExecOptions } from "azure-pipelines-task-lib/toolrunner";
|
||||
import * as artifactToolRunner from "../ArtifactToolRunner";
|
||||
import * as artifactToolUtilities from "../ArtifactToolUtilities";
|
||||
import * as auth from "./Authentication";
|
||||
import { UniversalPackagesResult } from "./universalPackages";
|
||||
|
||||
export async function run(artifactToolPath: string, hash: string, targetFolder: string): Promise<boolean> {
|
||||
export async function run(artifactToolPath: string, hash: string, targetFolder: string): Promise<UniversalPackagesResult> {
|
||||
try {
|
||||
// Get directory to publish
|
||||
let downloadDir: string = targetFolder;
|
||||
const downloadDir: string = targetFolder;
|
||||
if (downloadDir.length < 1) {
|
||||
tl.warning(tl.loc("Info_DownloadDirectoryNotFound"));
|
||||
return;
|
||||
|
@ -25,7 +26,7 @@ export async function run(artifactToolPath: string, hash: string, targetFolder:
|
|||
// Feed Auth
|
||||
let internalAuthInfo: auth.InternalAuthInfo;
|
||||
|
||||
let toolRunnerOptions = artifactToolRunner.getOptions();
|
||||
const toolRunnerOptions = artifactToolRunner.getOptions();
|
||||
|
||||
// getting inputs
|
||||
serviceUri = tl.getEndpointUrl("SYSTEMVSSCONNECTION", false);
|
||||
|
@ -35,9 +36,9 @@ export async function run(artifactToolPath: string, hash: string, targetFolder:
|
|||
// Getting package name from hash
|
||||
const packageId = tl.getVariable('Build.DefinitionName')
|
||||
.replace(/\s/g, "")
|
||||
.substring(0,255)
|
||||
.substring(0, 255)
|
||||
.toLowerCase();
|
||||
|
||||
|
||||
const accessToken = pkgLocationUtils.getSystemAccessToken();
|
||||
|
||||
internalAuthInfo = new auth.InternalAuthInfo([], accessToken);
|
||||
|
@ -63,19 +64,29 @@ export async function run(artifactToolPath: string, hash: string, targetFolder:
|
|||
downloadPackageUsingArtifactTool(downloadDir, downloadOptions, toolRunnerOptions);
|
||||
|
||||
console.log('artifact downloaded');
|
||||
return true;
|
||||
return {
|
||||
toolRan: true,
|
||||
success: true,
|
||||
};
|
||||
} catch (err) {
|
||||
if (!err.message.includes("Can't find the package")) {
|
||||
tl.error(err);
|
||||
tl.warning(err);
|
||||
return {
|
||||
toolRan: false,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
|
||||
return false;
|
||||
return {
|
||||
toolRan: true,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function downloadPackageUsingArtifactTool(downloadDir: string, options: artifactToolRunner.IArtifactToolOptions, execOptions: IExecOptions) {
|
||||
|
||||
let command = new Array<string>();
|
||||
const command = new Array<string>();
|
||||
|
||||
command.push("universal", "download",
|
||||
"--feed", options.feedId,
|
||||
|
@ -88,6 +99,7 @@ function downloadPackageUsingArtifactTool(downloadDir: string, options: artifact
|
|||
|
||||
console.log(tl.loc("Info_Downloading", options.packageName, options.packageVersion, options.feedId));
|
||||
const execResult: IExecSyncResult = artifactToolRunner.runArtifactTool(options.artifactToolPath, command, execOptions);
|
||||
|
||||
if (execResult.code === 0) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -3,30 +3,31 @@
|
|||
|
||||
import * as pkgLocationUtils from "../locationUtilities";
|
||||
import { ProvenanceHelper } from "../provenance";
|
||||
import * as tl from "vsts-task-lib";
|
||||
import { IExecOptions, IExecSyncResult } from "vsts-task-lib/toolrunner";
|
||||
import * as artifactToolRunner from "./ArtifactToolRunner";
|
||||
import * as artifactToolUtilities from "./ArtifactToolUtilities";
|
||||
import * as tl from "azure-pipelines-task-lib";
|
||||
import { IExecOptions, IExecSyncResult } from "azure-pipelines-task-lib/toolrunner";
|
||||
import * as artifactToolRunner from "../ArtifactToolRunner";
|
||||
import * as artifactToolUtilities from "../ArtifactToolUtilities";
|
||||
import * as auth from "./Authentication";
|
||||
import { UniversalPackagesResult } from "./universalPackages";
|
||||
|
||||
export async function run(artifactToolPath: string, hash: string, targetFolder: string): Promise<boolean> {
|
||||
let buildIdentityDisplayName: string = null;
|
||||
let buildIdentityAccount: string = null;
|
||||
export async function run(artifactToolPath: string, hash: string, targetFolder: string): Promise<UniversalPackagesResult> {
|
||||
const buildIdentityDisplayName: string = null;
|
||||
const buildIdentityAccount: string = null;
|
||||
try {
|
||||
// Get directory to publish
|
||||
let publishDir: string = targetFolder;
|
||||
const publishDir: string = targetFolder;
|
||||
let serviceUri: string;
|
||||
let feedId: string;
|
||||
let packageName: string;
|
||||
let version: string = `1.0.0-${hash}`;
|
||||
const version: string = `1.0.0-${hash}`;
|
||||
let accessToken: string;
|
||||
let feedUri: string;
|
||||
let publishedPackageVar: string = tl.getInput("publishedPackageVar");
|
||||
const publishedPackageVar: string = tl.getInput("publishedPackageVar");
|
||||
const versionRadio = 'custom';
|
||||
|
||||
let internalAuthInfo: auth.InternalAuthInfo;
|
||||
|
||||
let toolRunnerOptions = artifactToolRunner.getOptions();
|
||||
const toolRunnerOptions = artifactToolRunner.getOptions();
|
||||
|
||||
let sessionId: string;
|
||||
|
||||
|
@ -35,7 +36,7 @@ export async function run(artifactToolPath: string, hash: string, targetFolder:
|
|||
|
||||
packageName = tl.getVariable('Build.DefinitionName')
|
||||
.replace(/\s/g, "")
|
||||
.substring(0,255)
|
||||
.substring(0, 255)
|
||||
.toLowerCase();
|
||||
|
||||
feedId = tl.getInput("feedList");
|
||||
|
@ -89,21 +90,21 @@ export async function run(artifactToolPath: string, hash: string, targetFolder:
|
|||
tl.setVariable(publishedPackageVar, `${packageName} ${version}`);
|
||||
}
|
||||
|
||||
return true;
|
||||
return {
|
||||
toolRan: true,
|
||||
success: true,
|
||||
};
|
||||
} catch (err) {
|
||||
tl.error(err);
|
||||
|
||||
if (buildIdentityDisplayName || buildIdentityAccount) {
|
||||
tl.warning(tl.loc("BuildIdentityPermissionsHint", buildIdentityDisplayName, buildIdentityAccount));
|
||||
}
|
||||
|
||||
tl.setResult(tl.TaskResult.Failed, tl.loc("PackagesFailedToPublish"));
|
||||
return false;
|
||||
tl.warning(`Issue saving package: ${err}`);
|
||||
return {
|
||||
toolRan: true,
|
||||
success: false,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function publishPackageUsingArtifactTool(publishDir: string, options: artifactToolRunner.IArtifactToolOptions, execOptions: IExecOptions) {
|
||||
let command = new Array<string>();
|
||||
const command = new Array<string>();
|
||||
command.push("universal", "publish",
|
||||
"--feed", options.feedId,
|
||||
"--service", options.accountUrl,
|
||||
|
@ -111,8 +112,7 @@ function publishPackageUsingArtifactTool(publishDir: string, options: artifactTo
|
|||
"--package-version", options.packageVersion,
|
||||
"--path", publishDir,
|
||||
"--patvar", "UNIVERSAL_PUBLISH_PAT",
|
||||
"--verbosity", tl.getInput("verbosity"),
|
||||
"--description", tl.getInput("packagePublishDescription"));
|
||||
"--verbosity", tl.getInput("verbosity"));
|
||||
|
||||
console.log(tl.loc("Info_Publishing", options.packageName, options.packageVersion, options.feedId));
|
||||
const execResult: IExecSyncResult = artifactToolRunner.runArtifactTool(options.artifactToolPath, command, execOptions);
|
||||
|
|
|
@ -1,69 +1,83 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as vsts from 'azure-devops-node-api';
|
||||
import * as interfaces from 'azure-devops-node-api/interfaces/common/VSSInterfaces';
|
||||
import * as tl from 'vsts-task-lib/task';
|
||||
import { IRequestOptions } from 'azure-devops-node-api/interfaces/common/VsoBaseInterfaces';
|
||||
import * as vsts from "azure-devops-node-api";
|
||||
import * as interfaces from "azure-devops-node-api/interfaces/common/VSSInterfaces";
|
||||
import * as tl from "azure-pipelines-task-lib/task";
|
||||
import { IRequestOptions } from "azure-devops-node-api/interfaces/common/VsoBaseInterfaces";
|
||||
|
||||
import * as provenance from "./provenance";
|
||||
|
||||
export enum ProtocolType {
|
||||
NuGet,
|
||||
Maven,
|
||||
Npm,
|
||||
PyPi
|
||||
NuGet,
|
||||
Maven,
|
||||
Npm,
|
||||
PyPi,
|
||||
}
|
||||
|
||||
export enum RegistryType {
|
||||
npm,
|
||||
NuGetV2,
|
||||
NuGetV3,
|
||||
PyPiSimple,
|
||||
PyPiUpload
|
||||
npm,
|
||||
NuGetV2,
|
||||
NuGetV3,
|
||||
PyPiSimple,
|
||||
PyPiUpload,
|
||||
}
|
||||
|
||||
export interface PackagingLocation {
|
||||
PackagingUris: string[];
|
||||
DefaultPackagingUri: string;
|
||||
PackagingUris: string[];
|
||||
DefaultPackagingUri: string;
|
||||
}
|
||||
|
||||
// Getting service urls from resource areas api
|
||||
export async function getServiceUriFromAreaId(serviceUri: string, accessToken: string, areaId: string): Promise<string> {
|
||||
const serverType = tl.getVariable('System.ServerType');
|
||||
if (!serverType || serverType.toLowerCase() !== 'hosted') {
|
||||
return serviceUri;
|
||||
}
|
||||
export async function getServiceUriFromAreaId(
|
||||
serviceUri: string,
|
||||
accessToken: string,
|
||||
areaId: string
|
||||
): Promise<string> {
|
||||
const serverType = tl.getVariable("System.ServerType");
|
||||
if (!serverType || serverType.toLowerCase() !== "hosted") {
|
||||
return serviceUri;
|
||||
}
|
||||
|
||||
const webApi = getWebApiWithProxy(serviceUri, accessToken);
|
||||
const locationApi = await webApi.getLocationsApi();
|
||||
const webApi = getWebApiWithProxy(serviceUri, accessToken);
|
||||
const locationApi = await webApi.getLocationsApi();
|
||||
|
||||
tl.debug(`Getting URI for area ID ${areaId} from ${serviceUri}`);
|
||||
try {
|
||||
const serviceUriFromArea = await locationApi.getResourceArea(areaId);
|
||||
return serviceUriFromArea.locationUrl;
|
||||
} catch (error) {
|
||||
throw new Error(error);
|
||||
}
|
||||
tl.debug(`Getting URI for area ID ${areaId} from ${serviceUri}`);
|
||||
try {
|
||||
const serviceUriFromArea = await locationApi.getResourceArea(areaId);
|
||||
console.log("got service url from area");
|
||||
return serviceUriFromArea.locationUrl;
|
||||
} catch (error) {
|
||||
throw new Error(error);
|
||||
}
|
||||
}
|
||||
|
||||
export async function getNuGetUriFromBaseServiceUri(serviceUri: string, accesstoken: string): Promise<string> {
|
||||
const nugetAreaId = 'B3BE7473-68EA-4A81-BFC7-9530BAAA19AD';
|
||||
export async function getNuGetUriFromBaseServiceUri(
|
||||
serviceUri: string,
|
||||
accesstoken: string
|
||||
): Promise<string> {
|
||||
const nugetAreaId = "B3BE7473-68EA-4A81-BFC7-9530BAAA19AD";
|
||||
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, nugetAreaId);
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, nugetAreaId);
|
||||
}
|
||||
|
||||
// Feeds url from location service
|
||||
export async function getFeedUriFromBaseServiceUri(serviceUri: string, accesstoken: string): Promise<string> {
|
||||
const feedAreaId = '7ab4e64e-c4d8-4f50-ae73-5ef2e21642a5';
|
||||
export async function getFeedUriFromBaseServiceUri(
|
||||
serviceUri: string,
|
||||
accesstoken: string
|
||||
): Promise<string> {
|
||||
const feedAreaId = "7ab4e64e-c4d8-4f50-ae73-5ef2e21642a5";
|
||||
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, feedAreaId);
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, feedAreaId);
|
||||
}
|
||||
|
||||
export async function getBlobstoreUriFromBaseServiceUri(serviceUri: string, accesstoken: string): Promise<string> {
|
||||
const blobAreaId = '5294ef93-12a1-4d13-8671-9d9d014072c8';
|
||||
export async function getBlobstoreUriFromBaseServiceUri(
|
||||
serviceUri: string,
|
||||
accesstoken: string
|
||||
): Promise<string> {
|
||||
const blobAreaId = "5294ef93-12a1-4d13-8671-9d9d014072c8";
|
||||
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, blobAreaId);
|
||||
return getServiceUriFromAreaId(serviceUri, accesstoken, blobAreaId);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -72,181 +86,210 @@ export async function getBlobstoreUriFromBaseServiceUri(serviceUri: string, acce
|
|||
* The second URI, if existent, will be Packaging's default access point
|
||||
* The remaining URI's will be alternate Packaging's access points
|
||||
*/
|
||||
export async function getPackagingUris(protocolType: ProtocolType): Promise<PackagingLocation> {
|
||||
tl.debug('Getting Packaging service access points');
|
||||
const collectionUrl = tl.getVariable('System.TeamFoundationCollectionUri');
|
||||
export async function getPackagingUris(
|
||||
protocolType: ProtocolType
|
||||
): Promise<PackagingLocation> {
|
||||
tl.debug("Getting Packaging service access points");
|
||||
const collectionUrl = tl.getVariable("System.TeamFoundationCollectionUri");
|
||||
|
||||
const pkgLocation: PackagingLocation = {
|
||||
PackagingUris: [collectionUrl],
|
||||
DefaultPackagingUri: collectionUrl
|
||||
};
|
||||
const pkgLocation: PackagingLocation = {
|
||||
PackagingUris: [collectionUrl],
|
||||
DefaultPackagingUri: collectionUrl,
|
||||
};
|
||||
|
||||
const serverType = tl.getVariable('System.ServerType');
|
||||
if (!serverType || serverType.toLowerCase() !== 'hosted') {
|
||||
return pkgLocation;
|
||||
}
|
||||
const serverType = tl.getVariable("System.ServerType");
|
||||
if (!serverType || serverType.toLowerCase() !== "hosted") {
|
||||
return pkgLocation;
|
||||
}
|
||||
|
||||
const accessToken = getSystemAccessToken();
|
||||
const areaId = getAreaIdForProtocol(protocolType);
|
||||
const accessToken = getSystemAccessToken();
|
||||
const areaId = getAreaIdForProtocol(protocolType);
|
||||
|
||||
const serviceUri = await getServiceUriFromAreaId(collectionUrl, accessToken, areaId);
|
||||
const serviceUri = await getServiceUriFromAreaId(
|
||||
collectionUrl,
|
||||
accessToken,
|
||||
areaId
|
||||
);
|
||||
|
||||
const webApi = getWebApiWithProxy(serviceUri);
|
||||
const webApi = getWebApiWithProxy(serviceUri);
|
||||
|
||||
const locationApi = await webApi.getLocationsApi();
|
||||
const locationApi = await webApi.getLocationsApi();
|
||||
|
||||
tl.debug('Acquiring Packaging endpoints from ' + serviceUri);
|
||||
return locationApi.getConnectionData(interfaces.ConnectOptions.IncludeServices).then((connectionData) => {
|
||||
tl.debug('Successfully acquired the connection data');
|
||||
const defaultAccessPoint: string = connectionData.locationServiceData.accessMappings.find((mapping) =>
|
||||
mapping.moniker === connectionData.locationServiceData.defaultAccessMappingMoniker
|
||||
).accessPoint;
|
||||
tl.debug("Acquiring Packaging endpoints from " + serviceUri);
|
||||
return locationApi
|
||||
.getConnectionData(interfaces.ConnectOptions.IncludeServices)
|
||||
.then(connectionData => {
|
||||
tl.debug("Successfully acquired the connection data");
|
||||
const defaultAccessPoint: string = connectionData.locationServiceData.accessMappings.find(
|
||||
mapping =>
|
||||
mapping.moniker ===
|
||||
connectionData.locationServiceData.defaultAccessMappingMoniker
|
||||
).accessPoint;
|
||||
|
||||
        pkgLocation.DefaultPackagingUri = defaultAccessPoint;
        pkgLocation.PackagingUris.push(defaultAccessPoint);
        pkgLocation.PackagingUris = pkgLocation.PackagingUris.concat(
            connectionData.locationServiceData.accessMappings.map((mapping) => {
                return mapping.accessPoint;
            }));
        pkgLocation.DefaultPackagingUri = defaultAccessPoint;
        pkgLocation.PackagingUris.push(defaultAccessPoint);
        pkgLocation.PackagingUris = pkgLocation.PackagingUris.concat(
            connectionData.locationServiceData.accessMappings.map(mapping => {
                return mapping.accessPoint;
            })
        );

        tl.debug('Acquired location');
        tl.debug(JSON.stringify(pkgLocation));
        return pkgLocation;
    }).catch((error) => {
        tl.debug('An error occurred while acquiring the connection data');
        tl.debug(JSON.stringify(error));
        return pkgLocation;
        tl.debug("Acquired location");
        tl.debug(JSON.stringify(pkgLocation));
        return pkgLocation;
    })
    .catch(error => {
        tl.debug("An error occurred while acquiring the connection data");
        tl.debug(JSON.stringify(error));
        return pkgLocation;
    });
}

export function getSystemAccessToken(): string {
    tl.debug('Getting credentials for local feeds');
    const auth = tl.getEndpointAuthorization('SYSTEMVSSCONNECTION', false);
    if (auth.scheme === 'OAuth') {
        tl.debug('Got auth token');
        return auth.parameters['AccessToken'];
    } else {
        tl.warning('Could not determine credentials to use');
    }
    tl.debug("Getting credentials for local feeds");
    const auth = tl.getEndpointAuthorization("SYSTEMVSSCONNECTION", false);
    if (auth.scheme === "OAuth") {
        tl.debug("Got auth token");
        return auth.parameters.AccessToken;
    } else {
        tl.warning("Could not determine credentials to use");
    }
}

function getAreaIdForProtocol(protocolType: ProtocolType): string {
    switch (protocolType) {
        case ProtocolType.Maven:
            return '6F7F8C07-FF36-473C-BCF3-BD6CC9B6C066';
        case ProtocolType.Npm:
            return '4C83CFC1-F33A-477E-A789-29D38FFCA52E';
        default:
        case ProtocolType.NuGet:
            return 'B3BE7473-68EA-4A81-BFC7-9530BAAA19AD';
    }
    switch (protocolType) {
        case ProtocolType.Maven:
            return "6F7F8C07-FF36-473C-BCF3-BD6CC9B6C066";
        case ProtocolType.Npm:
            return "4C83CFC1-F33A-477E-A789-29D38FFCA52E";
        default:
        case ProtocolType.NuGet:
            return "B3BE7473-68EA-4A81-BFC7-9530BAAA19AD";
    }
}

export function getWebApiWithProxy(serviceUri: string, accessToken?: string): vsts.WebApi {
    if (!accessToken) {
        accessToken = getSystemAccessToken();
    }
export function getWebApiWithProxy(
    serviceUri: string,
    accessToken?: string
): vsts.WebApi {
    if (!accessToken) {
        accessToken = getSystemAccessToken();
    }
    const credentialHandler = vsts.getBasicHandler("vsts", accessToken);

    const credentialHandler = vsts.getBasicHandler('vsts', accessToken);
    const options: IRequestOptions = {
        proxy: tl.getHttpProxyConfiguration(serviceUri)
    };
    return new vsts.WebApi(serviceUri, credentialHandler, options);
    const options: IRequestOptions = {
        proxy: tl.getHttpProxyConfiguration(serviceUri),
    };
    return new vsts.WebApi(serviceUri, credentialHandler, options);
}

interface RegistryLocation {
    apiVersion: string,
    area: string,
    locationId: string
};
    apiVersion: string;
    area: string;
    locationId: string;
}

export async function getFeedRegistryUrl(
    packagingUrl: string,
    registryType: RegistryType,
    feedId: string,
    accessToken?: string,
    useSession?: boolean): Promise<string> {
    let loc : RegistryLocation;
    switch (registryType) {
        case RegistryType.npm:
            loc = {
                apiVersion: '3.0-preview.1',
                area: 'npm',
                locationId: 'D9B75B07-F1D9-4A67-AAA6-A4D9E66B3352'
            };
            break;
        case RegistryType.NuGetV2:
            loc = {
                apiVersion: '3.0-preview.1',
                area: 'nuget',
                locationId: "5D6FC3B3-EF78-4342-9B6E-B3799C866CFA"
            };
            break;
        case RegistryType.PyPiSimple:
            loc = {
                apiVersion: '5.0',
                area: 'pypi',
                locationId: "93377A2C-F5FB-48B9-A8DC-7781441CABF1"
            };
            break;
        case RegistryType.PyPiUpload:
            loc = {
                apiVersion: '5.0',
                area: 'pypi',
                locationId: "C7A75C1B-08AC-4B11-B468-6C7EF835C85E"
            };
            break;
        default:
        case RegistryType.NuGetV3:
            loc = {
                apiVersion: '3.0-preview.1',
                area: 'nuget',
                locationId: "9D3A4E8E-2F8F-4AE1-ABC2-B461A51CB3B3"
            };
            break;
    }
    packagingUrl: string,
    registryType: RegistryType,
    feedId: string,
    accessToken?: string,
    useSession?: boolean
): Promise<string> {
    let loc: RegistryLocation;
    switch (registryType) {
        case RegistryType.npm:
            loc = {
                apiVersion: "3.0-preview.1",
                area: "npm",
                locationId: "D9B75B07-F1D9-4A67-AAA6-A4D9E66B3352",
            };
            break;
        case RegistryType.NuGetV2:
            loc = {
                apiVersion: "3.0-preview.1",
                area: "nuget",
                locationId: "5D6FC3B3-EF78-4342-9B6E-B3799C866CFA",
            };
            break;
        case RegistryType.PyPiSimple:
            loc = {
                apiVersion: "5.0",
                area: "pypi",
                locationId: "93377A2C-F5FB-48B9-A8DC-7781441CABF1",
            };
            break;
        case RegistryType.PyPiUpload:
            loc = {
                apiVersion: "5.0",
                area: "pypi",
                locationId: "C7A75C1B-08AC-4B11-B468-6C7EF835C85E",
            };
            break;
        default:
        case RegistryType.NuGetV3:
            loc = {
                apiVersion: "3.0-preview.1",
                area: "nuget",
                locationId: "9D3A4E8E-2F8F-4AE1-ABC2-B461A51CB3B3",
            };
            break;
    }

    tl.debug("Getting registry url from " + packagingUrl);
    tl.debug("Getting registry url from " + packagingUrl);

    const vssConnection = getWebApiWithProxy(packagingUrl, accessToken);
    const vssConnection = getWebApiWithProxy(packagingUrl, accessToken);

    let sessionId = feedId;
    if (useSession) {
        sessionId = await provenance.ProvenanceHelper.GetSessionId(
            feedId,
            loc.area /* protocol */,
            vssConnection.serverUrl,
            [vssConnection.authHandler],
            vssConnection.options);
    }
    let sessionId = feedId;
    if (useSession) {
        sessionId = await provenance.ProvenanceHelper.GetSessionId(
            feedId,
            loc.area /* protocol */,
            vssConnection.serverUrl,
            [vssConnection.authHandler],
            vssConnection.options
        );
    }

    const data = await Retry(async () => {
        return await vssConnection.vsoClient.getVersioningData(loc.apiVersion, loc.area, loc.locationId, { feedId: sessionId });
    }, 4, 100);
    const data = await Retry(
        async () => {
            return await vssConnection.vsoClient.getVersioningData(
                loc.apiVersion,
                loc.area,
                loc.locationId,
                { feedId: sessionId }
            );
        },
        4,
        100
    );

    tl.debug("Feed registry url: " + data.requestUrl);
    return data.requestUrl;
    tl.debug("Feed registry url: " + data.requestUrl);
    return data.requestUrl;
}

// This should be replaced when retry is implemented in vso client.
async function Retry<T>(cb : () => Promise<T>, max_retry: number, retry_delay: number) : Promise<T> {
    try {
        return await cb();
    } catch(exception) {
        tl.debug(JSON.stringify(exception));
        if(max_retry > 0)
        {
            tl.debug("Waiting " + retry_delay + "ms...");
            await delay(retry_delay);
            tl.debug("Retrying...");
            return await Retry<T>(cb, max_retry-1, retry_delay*2);
        } else {
            throw new Error(exception);
        }
async function Retry<T>(
    cb: () => Promise<T>,
    max_retry: number,
    retry_delay: number
): Promise<T> {
    try {
        return await cb();
    } catch (exception) {
        tl.debug(JSON.stringify(exception));
        if (max_retry > 0) {
            tl.debug("Waiting " + retry_delay + "ms...");
            await delay(retry_delay);
            tl.debug("Retrying...");
            return await Retry<T>(cb, max_retry - 1, retry_delay * 2);
        } else {
            throw new Error(exception);
        }
    }
}
function delay(delayMs: number) {
    return new Promise(function(resolve) {
        setTimeout(resolve, delayMs);
    });
}
function delay(delayMs:number) {
    return new Promise(function(resolve) {
        setTimeout(resolve, delayMs);
    });
}
@ -13,15 +13,33 @@
|
|||
}
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "10.12.9",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.9.tgz",
|
||||
"integrity": "sha512-eajkMXG812/w3w4a1OcBlaTwsFPO5F7fJ/amy+tieQxEMWBlbV1JGSjkFM+zkHNf81Cad+dfIRA+IBkvmvdAeA=="
|
||||
"version": "11.13.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-11.13.0.tgz",
|
||||
"integrity": "sha512-rx29MMkRdVmzunmiA4lzBYJNnXsW/PhG4kMBy2ATsYaDjGGR75dCFEVVROKpNwlVdcUX3xxlghKQOeDPBJobng=="
|
||||
},
|
||||
"@types/semver": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz",
|
||||
"integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ=="
|
||||
},
|
||||
"@types/uuid": {
|
||||
"version": "3.4.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.4.tgz",
|
||||
"integrity": "sha512-tPIgT0GUmdJQNSHxp0X2jnpQfBSTfGxUMc/2CXBU2mnyTFVYVa2ojpoQ74w0U2yn2vw3jnC640+77lkFFpdVDw==",
|
||||
"requires": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"adm-zip": {
|
||||
"version": "0.4.13",
|
||||
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.13.tgz",
|
||||
"integrity": "sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw=="
|
||||
},
|
||||
"arg": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.0.tgz",
|
||||
"integrity": "sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg=="
|
||||
},
|
||||
"azure-devops-node-api": {
|
||||
"version": "6.6.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-6.6.0.tgz",
|
||||
|
@ -43,6 +61,51 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-task-lib": {
|
||||
"version": "2.8.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-2.8.0.tgz",
|
||||
"integrity": "sha512-PR8oap9z2j+o455W3PwAfB4SX1p4GdJc9OHQaQV0V+iQS1IBY6dVgcNSQMkHAXb0V1bbuLOFBLanXPe5eSgGTQ==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-tool-lib": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-tool-lib/-/azure-pipelines-tool-lib-0.12.0.tgz",
|
||||
"integrity": "sha512-JAlFvMTtEXISrnJY/kgq0LecLi089RqXRf/gMsXYbflmzszklkc+LUJpR0A7NDmJ+9/MWpKY/ZX+Q/zirYa7gw==",
|
||||
"requires": {
|
||||
"@types/semver": "^5.3.0",
|
||||
"@types/uuid": "^3.0.1",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "1.0.9",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.0.9.tgz",
|
||||
"integrity": "sha512-iOdwgmnP/tF6Qs+oY4iEtCf/3fnCDl7Gy9LGPJ4E3M4Wj3uaSko15FVwbsaBmnBqTJORnXBWVY5306D4HH8oiA==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
|
@ -57,11 +120,21 @@
|
|||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"buffer-from": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||
},
|
||||
"diff": {
|
||||
"version": "3.5.0",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
|
||||
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA=="
|
||||
},
|
||||
"fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
|
@ -156,6 +229,11 @@
|
|||
"inherits": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"make-error": {
|
||||
"version": "1.3.5",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
|
||||
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g=="
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
|
@ -228,11 +306,37 @@
|
|||
"rechoir": "^0.6.2"
|
||||
}
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
|
||||
},
|
||||
"source-map-support": {
|
||||
"version": "0.5.11",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.11.tgz",
|
||||
"integrity": "sha512-//sajEx/fGL3iw6fltKMdPvy8kL3kJ2O3iuYlRoT3k9Kb4BjOoZ+BZzaNHeuaruSt+Kf3Zk9tnfAQg9/AJqUVQ==",
|
||||
"requires": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"sprintf-js": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.0.tgz",
|
||||
"integrity": "sha1-z/yvcC2vZeo5u04PorKZzsGhvkY="
|
||||
},
|
||||
"ts-node": {
|
||||
"version": "8.0.3",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.0.3.tgz",
|
||||
"integrity": "sha512-2qayBA4vdtVRuDo11DEFSsD/SFsBXQBRZZhbRGSIkmYmVkWjULn/GGMdG10KVqkaGndljfaTD8dKjWgcejO8YA==",
|
||||
"requires": {
|
||||
"arg": "^4.1.0",
|
||||
"diff": "^3.1.0",
|
||||
"make-error": "^1.1.1",
|
||||
"source-map-support": "^0.5.6",
|
||||
"yn": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"tunnel": {
|
||||
"version": "0.0.4",
|
||||
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
|
||||
|
@ -257,53 +361,15 @@
|
|||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
||||
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
|
||||
},
|
||||
"vsts-task-lib": {
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-lib/-/vsts-task-lib-2.6.0.tgz",
|
||||
"integrity": "sha512-ja2qX4BIUvswcNbGtIoGo1SM5mRVc3Yaf7oM4oY64bNHs04chKfvH6f3cDDG0pd44OrZIGQE9LgECzeau6z2wA==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"vsts-task-tool-lib": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-tool-lib/-/vsts-task-tool-lib-0.4.1.tgz",
|
||||
"integrity": "sha1-mYLTv14YS95SqpdCGJROEGJzRWU=",
|
||||
"requires": {
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "^0.11.0",
|
||||
"uuid": "^3.0.1",
|
||||
"vsts-task-lib": "^2.0.7"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "0.11.0",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-0.11.0.tgz",
|
||||
"integrity": "sha1-DvQTUtYo7i4IePtYpniRZF9qG0E=",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
|
||||
},
|
||||
"yn": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.0.0.tgz",
|
||||
"integrity": "sha512-+Wo/p5VRfxUgBUGy2j/6KX2mj9AYJWOHuhMjMcbBFc3y54o9/4buK1ksBvuiK01C3kby8DH9lSmJdSxw+4G/2Q=="
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -13,16 +13,17 @@
    "license": "MIT",
    "dependencies": {
        "@types/ltx": "^2.8.0",
        "@types/node": "^10.12.9",
        "@types/node": "^11.13.0",
        "adm-zip": "^0.4.11",
        "azure-devops-node-api": "^6.6.0",
        "azure-pipelines-task-lib": "^2.8.0",
        "azure-pipelines-tool-lib": "^0.12.0",
        "ini": "^1.3.4",
        "ip-address": "^5.8.9",
        "ltx": "^2.6.2",
        "shelljs": "^0.8.3",
        "q": "^1.5.0",
        "typed-rest-client": "0.12.0",
        "vsts-task-lib": "2.6.0",
        "vsts-task-tool-lib": "0.4.1"
        "shelljs": "^0.8.3",
        "ts-node": "^8.0.3",
        "typed-rest-client": "0.12.0"
    }
}
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import * as tl from "vsts-task-lib";
import * as tl from "azure-pipelines-task-lib";

import * as VsoBaseInterfaces from 'azure-devops-node-api/interfaces/common/VsoBaseInterfaces';
import { ClientVersioningData } from 'azure-devops-node-api/VsoClient';
@@ -4,7 +4,7 @@
        "target": "es6",
        "declaration": true,
        "noImplicitAny": false,
        "sourceMap": false
        "sourceMap": true
    },
    "exclude": [
        "node_modules"
@ -1,202 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a361a8ab3c327f208d3f82ad206971d4a63d8c25/mocha/mocha.d.ts
|
||||
interface MochaSetupOptions {
|
||||
//milliseconds to wait before considering a test slow
|
||||
slow?: number;
|
||||
|
||||
// timeout in milliseconds
|
||||
timeout?: number;
|
||||
|
||||
// ui name "bdd", "tdd", "exports" etc
|
||||
ui?: string;
|
||||
|
||||
//array of accepted globals
|
||||
globals?: any[];
|
||||
|
||||
// reporter instance (function or string), defaults to `mocha.reporters.Spec`
|
||||
reporter?: any;
|
||||
|
||||
// bail on the first test failure
|
||||
bail?: boolean;
|
||||
|
||||
// ignore global leaks
|
||||
ignoreLeaks?: boolean;
|
||||
|
||||
// grep string or regexp to filter tests with
|
||||
grep?: any;
|
||||
}
|
||||
|
||||
declare var mocha: Mocha;
|
||||
declare var describe: Mocha.IContextDefinition;
|
||||
declare var xdescribe: Mocha.IContextDefinition;
|
||||
// alias for `describe`
|
||||
declare var context: Mocha.IContextDefinition;
|
||||
// alias for `describe`
|
||||
declare var suite: Mocha.IContextDefinition;
|
||||
declare var it: Mocha.ITestDefinition;
|
||||
declare var xit: Mocha.ITestDefinition;
|
||||
// alias for `it`
|
||||
declare var test: Mocha.ITestDefinition;
|
||||
declare var specify: Mocha.ITestDefinition;
|
||||
|
||||
interface MochaDone {
|
||||
(error?: any): any;
|
||||
}
|
||||
|
||||
interface ActionFunction {
|
||||
(done: MochaDone): any | PromiseLike<any>
|
||||
}
|
||||
|
||||
declare function setup(action: ActionFunction): void;
|
||||
declare function teardown(action: ActionFunction): void;
|
||||
declare function suiteSetup(action: ActionFunction): void;
|
||||
declare function suiteTeardown(action: ActionFunction): void;
|
||||
declare function before(action: ActionFunction): void;
|
||||
declare function before(description: string, action: ActionFunction): void;
|
||||
declare function after(action: ActionFunction): void;
|
||||
declare function after(description: string, action: ActionFunction): void;
|
||||
declare function beforeEach(action: ActionFunction): void;
|
||||
declare function beforeEach(description: string, action: ActionFunction): void;
|
||||
declare function afterEach(action: ActionFunction): void;
|
||||
declare function afterEach(description: string, action: ActionFunction): void;
|
||||
|
||||
declare class Mocha {
|
||||
currentTest: Mocha.ITestDefinition;
|
||||
constructor(options?: {
|
||||
grep?: RegExp;
|
||||
ui?: string;
|
||||
reporter?: string;
|
||||
timeout?: number;
|
||||
bail?: boolean;
|
||||
});
|
||||
|
||||
/** Setup mocha with the given options. */
|
||||
setup(options: MochaSetupOptions): Mocha;
|
||||
bail(value?: boolean): Mocha;
|
||||
addFile(file: string): Mocha;
|
||||
/** Sets reporter by name, defaults to "spec". */
|
||||
reporter(name: string): Mocha;
|
||||
/** Sets reporter constructor, defaults to mocha.reporters.Spec. */
|
||||
reporter(reporter: (runner: Mocha.IRunner, options: any) => any): Mocha;
|
||||
ui(value: string): Mocha;
|
||||
grep(value: string): Mocha;
|
||||
grep(value: RegExp): Mocha;
|
||||
invert(): Mocha;
|
||||
ignoreLeaks(value: boolean): Mocha;
|
||||
checkLeaks(): Mocha;
|
||||
/**
|
||||
* Function to allow assertion libraries to throw errors directly into mocha.
|
||||
* This is useful when running tests in a browser because window.onerror will
|
||||
* only receive the 'message' attribute of the Error.
|
||||
*/
|
||||
throwError(error: Error): void;
|
||||
/** Enables growl support. */
|
||||
growl(): Mocha;
|
||||
globals(value: string): Mocha;
|
||||
globals(values: string[]): Mocha;
|
||||
useColors(value: boolean): Mocha;
|
||||
useInlineDiffs(value: boolean): Mocha;
|
||||
timeout(value: number): Mocha;
|
||||
slow(value: number): Mocha;
|
||||
enableTimeouts(value: boolean): Mocha;
|
||||
asyncOnly(value: boolean): Mocha;
|
||||
noHighlighting(value: boolean): Mocha;
|
||||
/** Runs tests and invokes `onComplete()` when finished. */
|
||||
run(onComplete?: (failures: number) => void): Mocha.IRunner;
|
||||
}
|
||||
|
||||
// merge the Mocha class declaration with a module
|
||||
declare namespace Mocha {
|
||||
/** Partial interface for Mocha's `Runnable` class. */
|
||||
interface IRunnable {
|
||||
title: string;
|
||||
fn: Function;
|
||||
async: boolean;
|
||||
sync: boolean;
|
||||
timedOut: boolean;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Suite` class. */
|
||||
interface ISuite {
|
||||
parent: ISuite;
|
||||
title: string;
|
||||
|
||||
fullTitle(): string;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Test` class. */
|
||||
interface ITest extends IRunnable {
|
||||
parent: ISuite;
|
||||
pending: boolean;
|
||||
|
||||
fullTitle(): string;
|
||||
}
|
||||
|
||||
/** Partial interface for Mocha's `Runner` class. */
|
||||
interface IRunner {}
|
||||
|
||||
interface IContextDefinition {
|
||||
(description: string, spec: () => void): ISuite;
|
||||
only(description: string, spec: () => void): ISuite;
|
||||
skip(description: string, spec: () => void): void;
|
||||
timeout(ms: number): void;
|
||||
}
|
||||
|
||||
interface ITestDefinition {
|
||||
(expectation: string, assertion?: ActionFunction): ITest;
|
||||
only(expectation: string, assertion?: ActionFunction): ITest;
|
||||
skip(expectation: string, assertion?: ActionFunction): void;
|
||||
timeout(ms: number): void;
|
||||
state: "failed" | "passed";
|
||||
}
|
||||
|
||||
export module reporters {
|
||||
export class Base {
|
||||
stats: {
|
||||
suites: number;
|
||||
tests: number;
|
||||
passes: number;
|
||||
pending: number;
|
||||
failures: number;
|
||||
};
|
||||
|
||||
constructor(runner: IRunner);
|
||||
}
|
||||
|
||||
export class Doc extends Base {}
|
||||
export class Dot extends Base {}
|
||||
export class HTML extends Base {}
|
||||
export class HTMLCov extends Base {}
|
||||
export class JSON extends Base {}
|
||||
export class JSONCov extends Base {}
|
||||
export class JSONStream extends Base {}
|
||||
export class Landing extends Base {}
|
||||
export class List extends Base {}
|
||||
export class Markdown extends Base {}
|
||||
export class Min extends Base {}
|
||||
export class Nyan extends Base {}
|
||||
export class Progress extends Base {
|
||||
/**
|
||||
* @param options.open String used to indicate the start of the progress bar.
|
||||
* @param options.complete String used to indicate a complete test on the progress bar.
|
||||
* @param options.incomplete String used to indicate an incomplete test on the progress bar.
|
||||
* @param options.close String used to indicate the end of the progress bar.
|
||||
*/
|
||||
constructor(runner: IRunner, options?: {
|
||||
open?: string;
|
||||
complete?: string;
|
||||
incomplete?: string;
|
||||
close?: string;
|
||||
});
|
||||
}
|
||||
export class Spec extends Base {}
|
||||
export class TAP extends Base {}
|
||||
export class XUnit extends Base {
|
||||
constructor(runner: IRunner, options?: any);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
declare module "mocha" {
|
||||
export = Mocha;
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
{
|
||||
"resolution": "main",
|
||||
"tree": {
|
||||
"src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a361a8ab3c327f208d3f82ad206971d4a63d8c25/mocha/mocha.d.ts",
|
||||
"raw": "registry:dt/mocha#2.2.5+20160720003353",
|
||||
"typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a361a8ab3c327f208d3f82ad206971d4a63d8c25/mocha/mocha.d.ts"
|
||||
}
|
||||
}
|
|
@ -1,357 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/623f30ab194a3486e014ca39bc7f2089897d6ce4/q/Q.d.ts
|
||||
declare function Q<T>(promise: Q.IPromise<T>): Q.Promise<T>;
|
||||
/**
|
||||
* If value is not a promise, returns a promise that is fulfilled with value.
|
||||
*/
|
||||
declare function Q<T>(value: T): Q.Promise<T>;
|
||||
|
||||
declare namespace Q {
|
||||
interface IPromise<T> {
|
||||
then<U>(onFulfill?: (value: T) => U | IPromise<U>, onReject?: (error: any) => U | IPromise<U>): IPromise<U>;
|
||||
}
|
||||
|
||||
interface Deferred<T> {
|
||||
promise: Promise<T>;
|
||||
resolve(value?: T): void;
|
||||
resolve(value?: IPromise<T>): void;
|
||||
reject(reason: any): void;
|
||||
notify(value: any): void;
|
||||
makeNodeResolver(): (reason: any, value: T) => void;
|
||||
}
|
||||
|
||||
interface Promise<T> {
|
||||
/**
|
||||
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
|
||||
|
||||
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
|
||||
*/
|
||||
fin(finallyCallback: () => any): Promise<T>;
|
||||
/**
|
||||
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
|
||||
|
||||
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
|
||||
*/
|
||||
finally(finallyCallback: () => any): Promise<T>;
|
||||
|
||||
/**
|
||||
* The then method from the Promises/A+ specification, with an additional progress handler.
|
||||
*/
|
||||
then<U>(onFulfill?: (value: T) => U | IPromise<U>, onReject?: (error: any) => U | IPromise<U>, onProgress?: Function): Promise<U>;
|
||||
|
||||
/**
|
||||
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
|
||||
*
|
||||
* This is especially useful in conjunction with all
|
||||
*/
|
||||
spread<U>(onFulfill: (...args: any[]) => IPromise<U> | U, onReject?: (reason: any) => IPromise<U> | U): Promise<U>;
|
||||
|
||||
fail<U>(onRejected: (reason: any) => U | IPromise<U>): Promise<U>;
|
||||
|
||||
/**
|
||||
* A sugar method, equivalent to promise.then(undefined, onRejected).
|
||||
*/
|
||||
catch<U>(onRejected: (reason: any) => U | IPromise<U>): Promise<U>;
|
||||
|
||||
/**
|
||||
* A sugar method, equivalent to promise.then(undefined, undefined, onProgress).
|
||||
*/
|
||||
progress(onProgress: (progress: any) => any): Promise<T>;
|
||||
|
||||
/**
|
||||
* Much like then, but with different behavior around unhandled rejection. If there is an unhandled rejection, either because promise is rejected and no onRejected callback was provided, or because onFulfilled or onRejected threw an error or returned a rejected promise, the resulting rejection reason is thrown as an exception in a future turn of the event loop.
|
||||
*
|
||||
* This method should be used to terminate chains of promises that will not be passed elsewhere. Since exceptions thrown in then callbacks are consumed and transformed into rejections, exceptions at the end of the chain are easy to accidentally, silently ignore. By arranging for the exception to be thrown in a future turn of the event loop, so that it won't be caught, it causes an onerror event on the browser window, or an uncaughtException event on Node.js's process object.
|
||||
*
|
||||
* Exceptions thrown by done will have long stack traces, if Q.longStackSupport is set to true. If Q.onerror is set, exceptions will be delivered there instead of thrown in a future turn.
|
||||
*
|
||||
* The Golden Rule of done vs. then usage is: either return your promise to someone else, or if the chain ends with you, call done to terminate it.
|
||||
*/
|
||||
done(onFulfilled?: (value: T) => any, onRejected?: (reason: any) => any, onProgress?: (progress: any) => any): void;
|
||||
|
||||
/**
|
||||
* If callback is a function, assumes it's a Node.js-style callback, and calls it as either callback(rejectionReason) when/if promise becomes rejected, or as callback(null, fulfillmentValue) when/if promise becomes fulfilled. If callback is not a function, simply returns promise.
|
||||
*/
|
||||
nodeify(callback: (reason: any, value: any) => void): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise to get the named property of an object. Essentially equivalent to
|
||||
*
|
||||
* promise.then(function (o) {
|
||||
* return o[propertyName];
|
||||
* });
|
||||
*/
|
||||
get<U>(propertyName: String): Promise<U>;
|
||||
set<U>(propertyName: String, value: any): Promise<U>;
|
||||
delete<U>(propertyName: String): Promise<U>;
|
||||
/**
|
||||
* Returns a promise for the result of calling the named method of an object with the given array of arguments. The object itself is this in the function, just like a synchronous method call. Essentially equivalent to
|
||||
*
|
||||
* promise.then(function (o) {
|
||||
* return o[methodName].apply(o, args);
|
||||
* });
|
||||
*/
|
||||
post<U>(methodName: String, args: any[]): Promise<U>;
|
||||
/**
|
||||
* Returns a promise for the result of calling the named method of an object with the given variadic arguments. The object itself is this in the function, just like a synchronous method call.
|
||||
*/
|
||||
invoke<U>(methodName: String, ...args: any[]): Promise<U>;
|
||||
fapply<U>(args: any[]): Promise<U>;
|
||||
fcall<U>(...args: any[]): Promise<U>;
|
||||
|
||||
/**
|
||||
* Returns a promise for an array of the property names of an object. Essentially equivalent to
|
||||
*
|
||||
* promise.then(function (o) {
|
||||
* return Object.keys(o);
|
||||
* });
|
||||
*/
|
||||
keys(): Promise<string[]>;
|
||||
|
||||
/**
|
||||
* A sugar method, equivalent to promise.then(function () { return value; }).
|
||||
*/
|
||||
thenResolve<U>(value: U): Promise<U>;
|
||||
/**
|
||||
* A sugar method, equivalent to promise.then(function () { throw reason; }).
|
||||
*/
|
||||
thenReject(reason: any): Promise<T>;
|
||||
|
||||
/**
|
||||
* Attaches a handler that will observe the value of the promise when it becomes fulfilled, returning a promise for that same value, perhaps deferred but not replaced by the promise returned by the onFulfilled handler.
|
||||
*/
|
||||
tap(onFulfilled: (value: T) => any): Promise<T>;
|
||||
|
||||
timeout(ms: number, message?: string): Promise<T>;
|
||||
/**
|
||||
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
|
||||
*/
|
||||
delay(ms: number): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
|
||||
*/
|
||||
isFulfilled(): boolean;
|
||||
/**
|
||||
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
|
||||
*/
|
||||
isRejected(): boolean;
|
||||
/**
|
||||
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
|
||||
*/
|
||||
isPending(): boolean;
|
||||
|
||||
valueOf(): any;
|
||||
|
||||
/**
|
||||
* Returns a "state snapshot" object, which will be in one of three forms:
|
||||
*
|
||||
* - { state: "pending" }
|
||||
* - { state: "fulfilled", value: <fulfllment value> }
|
||||
* - { state: "rejected", reason: <rejection reason> }
|
||||
*/
|
||||
inspect(): PromiseState<T>;
|
||||
}
|
||||
|
||||
interface PromiseState<T> {
|
||||
/**
|
||||
* "fulfilled", "rejected", "pending"
|
||||
*/
|
||||
state: string;
|
||||
value?: T;
|
||||
reason?: any;
|
||||
}
|
||||
|
||||
// If no value provided, returned promise will be of void type
|
||||
export function when(): Promise<void>;
|
||||
|
||||
// if no fulfill, reject, or progress provided, returned promise will be of same type
|
||||
export function when<T>(value: T | IPromise<T>): Promise<T>;
|
||||
|
||||
// If a non-promise value is provided, it will not reject or progress
|
||||
export function when<T, U>(value: T | IPromise<T>, onFulfilled: (val: T) => U | IPromise<U>, onRejected?: (reason: any) => U | IPromise<U>, onProgress?: (progress: any) => any): Promise<U>;
|
||||
|
||||
/**
|
||||
* Currently "impossible" (and I use the term loosely) to implement due to TypeScript limitations as it is now.
|
||||
* See: https://github.com/Microsoft/TypeScript/issues/1784 for discussion on it.
|
||||
*/
|
||||
// export function try(method: Function, ...args: any[]): Promise<any>;
|
||||
|
||||
export function fbind<T>(method: (...args: any[]) => T | IPromise<T>, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
|
||||
export function fcall<T>(method: (...args: any[]) => T, ...args: any[]): Promise<T>;
|
||||
|
||||
export function send<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function invoke<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function mcall<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
|
||||
export function denodeify<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nbind<T>(nodeFunction: Function, thisArg: any, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nfbind<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nfcall<T>(nodeFunction: Function, ...args: any[]): Promise<T>;
|
||||
export function nfapply<T>(nodeFunction: Function, args: any[]): Promise<T>;
|
||||
|
||||
export function ninvoke<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function npost<T>(nodeModule: any, functionName: string, args: any[]): Promise<T>;
|
||||
export function nsend<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function nmcall<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D, E, F>(promises: [IPromise<A>, IPromise<B>, IPromise<C>, IPromise<D>, IPromise<E>, IPromise<F>]): Promise<[A, B, C, D, E, F]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D, E>(promises: [IPromise<A>, IPromise<B>, IPromise<C>, IPromise<D>, IPromise<E>]): Promise<[A, B, C, D, E]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D>(promises: [IPromise<A>, IPromise<B>, IPromise<C>, IPromise<D>]): Promise<[A, B, C, D]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C>(promises: [IPromise<A>, IPromise<B>, IPromise<C>]): Promise<[A, B, C]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B>(promises: [IPromise<A>, IPromise<B>]): Promise<[A, B]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<T>(promises: IPromise<T>[]): Promise<T[]>;
|
||||
|
||||
/**
|
||||
* Returns a promise for the first of an array of promises to become settled.
|
||||
*/
|
||||
export function race<T>(promises: IPromise<T>[]): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array of promise state snapshots, but only after all the original promises have settled, i.e. become either fulfilled or rejected.
|
||||
*/
|
||||
export function allSettled<T>(promises: IPromise<T>[]): Promise<PromiseState<T>[]>;
|
||||
|
||||
export function allResolved<T>(promises: IPromise<T>[]): Promise<Promise<T>[]>;
|
||||
|
||||
/**
|
||||
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
|
||||
* This is especially useful in conjunction with all.
|
||||
*/
|
||||
export function spread<T, U>(promises: IPromise<T>[], onFulfilled: (...args: T[]) => U | IPromise<U>, onRejected?: (reason: any) => U | IPromise<U>): Promise<U>;
|
||||
|
||||
/**
|
||||
* Returns a promise that will have the same result as promise, except that if promise is not fulfilled or rejected before ms milliseconds, the returned promise will be rejected with an Error with the given message. If message is not supplied, the message will be "Timed out after " + ms + " ms".
|
||||
*/
|
||||
export function timeout<T>(promise: Promise<T>, ms: number, message?: string): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
|
||||
*/
|
||||
export function delay<T>(promise: Promise<T>, ms: number): Promise<T>;
|
||||
/**
|
||||
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
|
||||
*/
|
||||
export function delay<T>(value: T, ms: number): Promise<T>;
|
||||
/**
|
||||
* Returns a promise that will be fulfilled with undefined after at least ms milliseconds have passed.
|
||||
*/
|
||||
export function delay(ms: number): Promise <void>;
|
||||
/**
|
||||
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
|
||||
*/
|
||||
export function isFulfilled(promise: Promise<any>): boolean;
|
||||
/**
|
||||
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
|
||||
*/
|
||||
export function isRejected(promise: Promise<any>): boolean;
|
||||
/**
|
||||
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
|
||||
*/
|
||||
export function isPending(promise: Promise<any>): boolean;
|
||||
|
||||
/**
|
||||
* Returns a "deferred" object with a:
|
||||
* promise property
|
||||
* resolve(value) method
|
||||
* reject(reason) method
|
||||
* notify(value) method
|
||||
* makeNodeResolver() method
|
||||
*/
|
||||
export function defer<T>(): Deferred<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise that is rejected with reason.
|
||||
*/
|
||||
export function reject<T>(reason?: any): Promise<T>;
|
||||
|
||||
export function Promise<T>(resolver: (resolve: (val: T | IPromise<T>) => void , reject: (reason: any) => void , notify: (progress: any) => void ) => void ): Promise<T>;
|
||||
|
||||
/**
|
||||
* Creates a new version of func that accepts any combination of promise and non-promise values, converting them to their fulfillment values before calling the original func. The returned version also always returns a promise: if func does a return or throw, then Q.promised(func) will return fulfilled or rejected promise, respectively.
|
||||
*
|
||||
* This can be useful for creating functions that accept either promises or non-promise values, and for ensuring that the function always returns a promise even in the face of unintentional thrown exceptions.
|
||||
*/
|
||||
export function promised<T>(callback: (...args: any[]) => T): (...args: any[]) => Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns whether the given value is a Q promise.
|
||||
*/
|
||||
export function isPromise(object: any): boolean;
|
||||
/**
|
||||
* Returns whether the given value is a promise (i.e. it's an object with a then function).
|
||||
*/
|
||||
export function isPromiseAlike(object: any): boolean;
|
||||
/**
|
||||
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
|
||||
*/
|
||||
export function isPending(object: any): boolean;
|
||||
/**
|
||||
* If an object is not a promise, it is as "near" as possible.
|
||||
* If a promise is rejected, it is as "near" as possible too.
|
||||
* If it’s a fulfilled promise, the fulfillment value is nearer.
|
||||
* If it’s a deferred promise and the deferred has been resolved, the
|
||||
* resolution is "nearer".
|
||||
*/
|
||||
export function nearer<T>(promise: Promise<T>): T;
|
||||
|
||||
/**
|
||||
* This is an experimental tool for converting a generator function into a deferred function. This has the potential of reducing nested callbacks in engines that support yield.
|
||||
*/
|
||||
export function async<T>(generatorFunction: any): (...args: any[]) => Promise<T>;
|
||||
export function nextTick(callback: Function): void;
|
||||
|
||||
/**
|
||||
* A settable property that will intercept any uncaught errors that would otherwise be thrown in the next tick of the event loop, usually as a result of done. Can be useful for getting the full stack trace of an error in browsers, which is not usually possible with window.onerror.
|
||||
*/
|
||||
export var onerror: (reason: any) => void;
|
||||
/**
|
||||
* A settable property that lets you turn on long stack trace support. If turned on, "stack jumps" will be tracked across asynchronous promise operations, so that if an uncaught error is thrown by done or a rejection reason's stack property is inspected in a rejection callback, a long stack trace is produced.
|
||||
*/
|
||||
export var longStackSupport: boolean;
|
||||
|
||||
/**
|
||||
* Calling resolve with a pending promise causes promise to wait on the passed promise, becoming fulfilled with its fulfillment value or rejected with its rejection reason (or staying pending forever, if the passed promise does).
|
||||
* Calling resolve with a rejected promise causes promise to be rejected with the passed promise's rejection reason.
|
||||
* Calling resolve with a fulfilled promise causes promise to be fulfilled with the passed promise's fulfillment value.
|
||||
* Calling resolve with a non-promise value causes promise to be fulfilled with that value.
|
||||
*/
|
||||
export function resolve<T>(object: IPromise<T>): Promise<T>;
|
||||
/**
|
||||
* Calling resolve with a pending promise causes promise to wait on the passed promise, becoming fulfilled with its fulfillment value or rejected with its rejection reason (or staying pending forever, if the passed promise does).
|
||||
* Calling resolve with a rejected promise causes promise to be rejected with the passed promise's rejection reason.
|
||||
* Calling resolve with a fulfilled promise causes promise to be fulfilled with the passed promise's fulfillment value.
|
||||
* Calling resolve with a non-promise value causes promise to be fulfilled with that value.
|
||||
*/
|
||||
export function resolve<T>(object: T): Promise<T>;
|
||||
|
||||
/**
|
||||
* Resets the global "Q" variable to the value it has before Q was loaded.
|
||||
* This will either be undefined if there was no version or the version of Q which was already loaded before.
|
||||
* @returns { The last version of Q. }
|
||||
*/
|
||||
export function noConflict(): typeof Q;
|
||||
}
|
||||
|
||||
declare module "q" {
|
||||
export = Q;
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
{
|
||||
"resolution": "main",
|
||||
"tree": {
|
||||
"src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/623f30ab194a3486e014ca39bc7f2089897d6ce4/q/Q.d.ts",
|
||||
"raw": "registry:dt/q#0.0.0+20160613154756",
|
||||
"typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/623f30ab194a3486e014ca39bc7f2089897d6ce4/q/Q.d.ts"
|
||||
}
|
||||
}
|
|
@ -1,4 +0,0 @@
|
|||
/// <reference path="globals/mocha/index.d.ts" />
|
||||
/// <reference path="globals/q/index.d.ts" />
|
||||
/// <reference path="modules/ini/index.d.ts" />
|
||||
/// <reference path="modules/mockery/index.d.ts" />
|
|
@ -1,20 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/81862d240d257e28eda42029c4a1bc8bea984360/ini/index.d.ts
|
||||
declare module 'ini' {
|
||||
// Type definitions for ini v1.3.3
|
||||
// Project: https://github.com/isaacs/ini
|
||||
// Definitions by: Marcin Porębski <https://github.com/marcinporebski>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
interface EncodeOptions {
|
||||
section: string
|
||||
whitespace: boolean
|
||||
}
|
||||
|
||||
export function decode(inistring: string): any;
|
||||
export function parse(initstring: string): any;
|
||||
export function encode(object: any, options?: EncodeOptions): string;
|
||||
export function stringify(object: any, options?: EncodeOptions): string;
|
||||
export function safe(val: string): string;
|
||||
export function unsafe(val: string): string;
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
{
|
||||
"resolution": "main",
|
||||
"tree": {
|
||||
"src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/81862d240d257e28eda42029c4a1bc8bea984360/ini/index.d.ts",
|
||||
"raw": "registry:dt/ini#1.3.3+20160505055005",
|
||||
"typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/81862d240d257e28eda42029c4a1bc8bea984360/ini/index.d.ts"
|
||||
}
|
||||
}
|
|
@ -1,36 +0,0 @@
|
|||
// Generated by typings
|
||||
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/337587de8c13868283993bfacdcdd1a0f3291e7f/mockery/index.d.ts
|
||||
declare module 'mockery' {
|
||||
// Type definitions for mockery 1.4.0
|
||||
// Project: https://github.com/mfncooper/mockery
|
||||
// Definitions by: jt000 <https://github.com/jt000>
|
||||
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
|
||||
|
||||
|
||||
|
||||
interface MockeryEnableArgs {
|
||||
useCleanCache?: boolean;
|
||||
warnOnReplace?: boolean;
|
||||
warnOnUnregistered?: boolean;
|
||||
}
|
||||
|
||||
export function enable(args?: MockeryEnableArgs): void;
|
||||
export function disable(): void;
|
||||
|
||||
export function registerMock(name: string, mock: any): void;
|
||||
export function deregisterMock(name: string): void;
|
||||
|
||||
export function registerSubstitute(name: string, substitute: string): void;
|
||||
export function deregisterSubstitute(name: string): void;
|
||||
|
||||
export function registerAllowable(name: string, unhook?: boolean): void;
|
||||
export function deregisterAllowable(name: string): void;
|
||||
|
||||
export function registerAllowables(names: string[]): void;
|
||||
export function deregisterAllowables(names: string[]): void;
|
||||
|
||||
export function deregisterAll(): void;
|
||||
export function resetCache(): void;
|
||||
export function warnOnUnregistered(value: boolean): void;
|
||||
export function warnOnReplace(value: boolean): void;
|
||||
}
|
|
@ -1,8 +0,0 @@
|
|||
{
|
||||
"resolution": "main",
|
||||
"tree": {
|
||||
"src": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/337587de8c13868283993bfacdcdd1a0f3291e7f/mockery/index.d.ts",
|
||||
"raw": "registry:dt/mockery#1.4.0+20160428043022",
|
||||
"typings": "https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/337587de8c13868283993bfacdcdd1a0f3291e7f/mockery/index.d.ts"
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,82 @@
import * as tmrm from "azure-pipelines-task-lib/mock-run";
import * as path from "path";
import * as fs from "fs";
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";

const taskPath = path.join(__dirname, "..", "restorecache.js");
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";

const a: TaskLibAnswers = {
    findMatch: {
        "**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
        "**/*/node_modules": [],
    },
    rmRF: {
        "/users/home/directory/tmp_cache": { success: true },
    },
    checkPath: { },
    exec: { },
    exist: { },
    which: { },
};

tmr.setAnswers(a);
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, null);

umh.mockUniversalCommand(
    "download",
    "node-package-feed",
    "builddefinition1",
    `1.0.0-${process.platform}-${hash}`,
    "/users/home/directory/tmp_cache",
    {
        code: 0,
        stdout: "ArtifactTool.exe output",
        stderr: "",
    }
);

tmr.setInput("keyFile", "**/*/yarn.lock");
tmr.setInput("targetFolders", "**/*/node_modules");

// mock a specific module function called in task
tmr.registerMock("fs", {
    readFileSync(
        path: string,
        options:
            | string
            | {
                  encoding: string;
                  flag?: string;
              }
    ): string {
        if (path.endsWith("/yarn.lock")) {
            const segments = path.split('/');
            return segments.slice(segments.length - 3).join('/');
        }
        return fs.readFileSync(path, options);
    },
    chmodSync: fs.chmodSync,
    writeFileSync: fs.writeFileSync,
    readdirSync: fs.readdirSync,
    mkdirSync: fs.mkdirSync,
    copyFileSync: fs.copyFileSync,
    statSync: fs.statSync,
    linkSync: fs.linkSync,
    symlinkSync: fs.symlinkSync,
});

tmr.registerMock("shelljs", {
    exec(command: string) {
        console.log(`Mock executing command: ${command}`);
        return {
            code: 0,
            stdout: "shelljs output",
            stderr: null,
        };
    },
});

tmr.run();
@ -0,0 +1,83 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "restorecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": [],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/directory/tmp_cache": { success: true },
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
exist: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, "/users/tmp/ArtifactTool.exe");
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"download",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"/users/home/directory/tmp_cache",
|
||||
{
|
||||
code: 0,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolders", "**/*/node_modules");
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock executing command: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@ -0,0 +1,83 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "restorecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": [],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/directory/tmp_cache": { success: true },
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
exist: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, "/users/tmp/ArtifactTool.exe");
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"download",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"/users/home/directory/tmp_cache",
|
||||
{
|
||||
code: 1,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "Can't find the package ",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolders", "**/*/node_modules");
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock executing command: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,13 @@
import * as ma from "azure-pipelines-task-lib/mock-answer";
import * as tmrm from "azure-pipelines-task-lib/mock-run";
import * as path from "path";

let taskPath = path.join(__dirname, "..", "restorecache.js");
let tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);

tmr.setInput("keyFile", "**/*/yarn.lock");
tmr.setInput("targetFolders", "**/*/node_modules");

process.env["SYSTEM_PULLREQUEST_ISFORK"] = "true";

tmr.run();
@@ -0,0 +1,23 @@
import * as ma from "azure-pipelines-task-lib/mock-answer";
import * as tmrm from "azure-pipelines-task-lib/mock-run";
import * as path from "path";

const taskPath = path.join(__dirname, "..", "restorecache.js");
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);

tmr.setInput("keyFile", "**/*/yarn.lock");
tmr.setInput("targetFolders", "**/*/node_modules");

// provide answers for task mock
const a: ma.TaskLibAnswers = {
    findMatch: {
        "**/*/yarn.lock": [],
    },
    rmRF: {
        "*": { success: true },
    },
} as ma.TaskLibAnswers;

tmr.setAnswers(a);

tmr.run();
@ -0,0 +1,83 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "restorecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": [],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/directory/tmp_cache": { success: true },
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
exist: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, "/users/tmp/ArtifactTool.exe");
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"download",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"/users/home/directory/tmp_cache",
|
||||
{
|
||||
code: 1,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "An error occurred on the service. User lacks permission to complete this action.",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolders", "**/*/node_modules");
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock executing command: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,108 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": ["src/webapi/node_modules", "src/application/node_modules"],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/node_modules",
|
||||
"src/application/node_modules",
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/DefaultWorkingDirectory/tmp_cache": { success: true },
|
||||
"DefaultWorkingDirectory/tmp_cache": {success: true},
|
||||
"\"DefaultWorkingDirectory/tmp_cache\"": {success: true},
|
||||
},
|
||||
stats: {
|
||||
"src/webapi/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
"src/application/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
},
|
||||
exist: {
|
||||
"DefaultWorkingDirectory/tmp_cache": true,
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, null);
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"publish",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"DefaultWorkingDirectory/tmp_cache",
|
||||
{
|
||||
code: 0,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
const key = `${process.platform}-${hash}`.toUpperCase();
|
||||
process.env[key] = "false";
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock exec: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,74 @@
|
|||
import * as ma from "azure-pipelines-task-lib/mock-answer";
|
||||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
const key = `${process.platform}-${hash}`;
|
||||
process.env[key.toUpperCase()] = "true";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: ma.TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": ["src/webapi/node_modules", "src/application/node_modules"],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/node_modules",
|
||||
"src/application/node_modules",
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"*": { success: true },
|
||||
},
|
||||
stats: {
|
||||
"src/webapi/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
"src/application/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
},
|
||||
} as ma.TaskLibAnswers;
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.run();
|
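This scenario marks the key as already restored by setting the uppercased "<platform>-<hash>" environment variable to "true" before savecache runs. A short sketch of the short-circuit the test appears to exercise is shown below; the surrounding logic is assumed, and the message mirrors the assertion in _suite.ts.

// Sketch (assumption): skip archiving when the restore step already reported a hit.
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
const key = `${process.platform}-${hash}`;
if (process.env[key.toUpperCase()] === "true") {
    console.log(`Cache entry already exists for: ${key}`);
    // no archive or publish work is queued in this case
}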
|
@@ -0,0 +1,108 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": ["src/webapi/node_modules", "src/application/node_modules"],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/node_modules",
|
||||
"src/application/node_modules",
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/DefaultWorkingDirectory/tmp_cache": { success: true },
|
||||
"DefaultWorkingDirectory/tmp_cache": {success: true},
|
||||
"\"DefaultWorkingDirectory/tmp_cache\"": {success: true},
|
||||
},
|
||||
stats: {
|
||||
"src/webapi/node_modules": {
|
||||
isDirectory() { return true; },
|
||||
},
|
||||
"src/application/node_modules": {
|
||||
isDirectory() { return true; },
|
||||
},
|
||||
},
|
||||
exist: {
|
||||
"DefaultWorkingDirectory/tmp_cache": true,
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, "/users/tmp/ArtifactTool.exe");
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"publish",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"DefaultWorkingDirectory/tmp_cache",
|
||||
{
|
||||
code: 0,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
const key = `${process.platform}-${hash}`.toUpperCase();
|
||||
process.env[key] = "false";
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock exec: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,13 @@
|
|||
import * as ma from "azure-pipelines-task-lib/mock-answer";
|
||||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
|
||||
let taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
let tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolders", "**/*/node_modules");
|
||||
|
||||
process.env["SYSTEM_PULLREQUEST_ISFORK"] = "true";
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,71 @@
|
|||
import * as ma from "azure-pipelines-task-lib/mock-answer";
|
||||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: ma.TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": ["src/webapi/node_modules", "src/application/node_modules"],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/node_modules",
|
||||
"src/application/node_modules",
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"*": { success: true },
|
||||
},
|
||||
stats: {
|
||||
"src/webapi/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
"src/application/node_modules": {
|
||||
isDirectory() {return true; },
|
||||
},
|
||||
},
|
||||
} as ma.TaskLibAnswers;
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,23 @@
|
|||
import * as ma from "azure-pipelines-task-lib/mock-answer";
|
||||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
// provide answers for task mock
|
||||
const a: ma.TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": [],
|
||||
},
|
||||
rmRF: {
|
||||
"*": { success: true },
|
||||
},
|
||||
} as ma.TaskLibAnswers;
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,31 @@
|
|||
import * as ma from "azure-pipelines-task-lib/mock-answer";
|
||||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: ma.TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": [],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"*": { success: true },
|
||||
},
|
||||
} as ma.TaskLibAnswers;
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
tmr.run();
|
|
@@ -0,0 +1,108 @@
|
|||
import * as tmrm from "azure-pipelines-task-lib/mock-run";
|
||||
import * as path from "path";
|
||||
import * as fs from "fs";
|
||||
import { TaskLibAnswers } from "azure-pipelines-task-lib/mock-answer";
|
||||
import { UniversalMockHelper } from "packaging-common/Tests/UniversalMockHelper";
|
||||
|
||||
const taskPath = path.join(__dirname, "..", "savecache.js");
|
||||
const tmr: tmrm.TaskMockRunner = new tmrm.TaskMockRunner(taskPath);
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
// provide answers for task mock
|
||||
const a: TaskLibAnswers = {
|
||||
findMatch: {
|
||||
"**/*/yarn.lock": ["src/webapi/yarn.lock", "src/application/yarn.lock"],
|
||||
"**/*/node_modules": ["src/webapi/node_modules", "src/application/node_modules"],
|
||||
},
|
||||
find: {
|
||||
DefaultWorkingDirectory: [
|
||||
"src/webapi/node_modules",
|
||||
"src/application/node_modules",
|
||||
"src/webapi/startup.config",
|
||||
"src/application/program.cs",
|
||||
],
|
||||
},
|
||||
rmRF: {
|
||||
"/users/home/DefaultWorkingDirectory/tmp_cache": { success: true },
|
||||
"DefaultWorkingDirectory/tmp_cache": {success: true},
|
||||
"\"DefaultWorkingDirectory/tmp_cache\"": {success: true},
|
||||
},
|
||||
stats: {
|
||||
"src/webapi/node_modules": {
|
||||
isDirectory() { return true; },
|
||||
},
|
||||
"src/application/node_modules": {
|
||||
isDirectory() { return true; },
|
||||
},
|
||||
},
|
||||
exist: {
|
||||
"DefaultWorkingDirectory/tmp_cache": true,
|
||||
},
|
||||
checkPath: { },
|
||||
exec: { },
|
||||
which: { },
|
||||
};
|
||||
|
||||
tmr.setAnswers(a);
|
||||
|
||||
const umh: UniversalMockHelper = new UniversalMockHelper(tmr, a, "/users/tmp/ArtifactTool.exe");
|
||||
|
||||
umh.mockUniversalCommand(
|
||||
"publish",
|
||||
"node-package-feed",
|
||||
"builddefinition1",
|
||||
`1.0.0-${process.platform}-${hash}`,
|
||||
"DefaultWorkingDirectory/tmp_cache",
|
||||
{
|
||||
code: 1,
|
||||
stdout: "ArtifactTool.exe output",
|
||||
stderr: "An error occurred on the service. User lacks permission to complete this action.",
|
||||
}
|
||||
);
|
||||
|
||||
tmr.setInput("keyFile", "**/*/yarn.lock");
|
||||
tmr.setInput("targetFolder", "**/*/node_modules");
|
||||
|
||||
const key = `${process.platform}-${hash}`.toUpperCase();
|
||||
process.env[key] = "false";
|
||||
process.env["SYSTEM_DEFAULTWORKINGDIRECTORY"] = "DefaultWorkingDirectory";
|
||||
|
||||
// mock a specific module function called in task
|
||||
tmr.registerMock("fs", {
|
||||
readFileSync(
|
||||
path: string,
|
||||
options:
|
||||
| string
|
||||
| {
|
||||
encoding: string;
|
||||
flag?: string;
|
||||
}
|
||||
): string {
|
||||
if (path.endsWith("/yarn.lock")) {
|
||||
const segments = path.split('/');
|
||||
return segments.splice(segments.length - 3).join('/');
|
||||
}
|
||||
return fs.readFileSync(path, options);
|
||||
},
|
||||
chmodSync: fs.chmodSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
readdirSync: fs.readdirSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
copyFileSync: fs.copyFileSync,
|
||||
statSync: fs.statSync,
|
||||
linkSync: fs.linkSync,
|
||||
symlinkSync: fs.symlinkSync,
|
||||
});
|
||||
|
||||
tmr.registerMock("shelljs", {
|
||||
exec(command: string) {
|
||||
console.log(`Mock executing command: ${command}`);
|
||||
return {
|
||||
code: 0,
|
||||
stdout: "shelljs output",
|
||||
stderr: null,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
tmr.run();
|
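The publish command mocked above exits with code 1 and a permissions error, yet the suite below still expects the task to succeed with warnings only. A rough sketch of that warn-but-don't-fail handling follows; publishCachedArchive is a hypothetical stand-in for the mocked universal publish call, and the real error wrapping lives in packaging-common.

// Sketch (assumption): surface publish failures as warnings instead of failing the build.
import * as tl from "azure-pipelines-task-lib";

function publishCachedArchive(): void {
    // Hypothetical stand-in that simulates the mocked exit-code-1 failure.
    throw new Error("An error occurred on the service. User lacks permission to complete this action.");
}

try {
    publishCachedArchive();
    console.log("Cache successfully saved");
} catch (err) {
    tl.warning(`Issue saving package: ${err}`);
    tl.warning("Cache unsuccessfully saved. Find more information in logs above");
    // no tl.setResult(tl.TaskResult.Failed) here, so the overall task result stays Succeeded
}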
|
@@ -0,0 +1,377 @@
|
|||
import * as path from "path";
|
||||
import * as assert from "assert";
|
||||
import * as ttm from "azure-pipelines-task-lib/mock-test";
|
||||
import { platform } from "os";
|
||||
|
||||
before(function() {
|
||||
this.timeout(5000);
|
||||
});
|
||||
|
||||
const hash = "2f6b1287b26ff4716cffdeeabd434aa1a3da9f092ebf87579a916ca0bf91cd65";
|
||||
|
||||
describe("RestoreCache tests", function() {
|
||||
before(function() { });
|
||||
|
||||
after(() => { });
|
||||
|
||||
it("RestoreCache runs successfully with warnings if no key files are found",
|
||||
function(done: MochaDone) {
|
||||
const tp = path.join(__dirname, "RestoreCacheNoKeyFiles.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert.equal(tr.succeeded, true, "should have succeeded");
|
||||
assert.equal(
|
||||
tr.warningIssues.length > 0,
|
||||
true,
|
||||
"should have warnings from key file"
|
||||
);
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("no key files matching:") >= 0,
|
||||
true,
|
||||
"should display 'no key files matching:'"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("RestoreCache is skipped if run from repository fork", function(done: MochaDone) {
|
||||
const tp = path.join(__dirname, "RestoreCacheFromFork.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert.equal(tr.succeeded, true, "should have succeeded");
|
||||
assert.equal(tr.warningIssues.length, 0, "should have no warnings");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("result=Skipped;") >= 0,
|
||||
true,
|
||||
"task result should be: 'Skipped'"
|
||||
);
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("Caches are not restored for forked repositories.") >=
|
||||
0,
|
||||
true,
|
||||
"should display 'Caches are not restored for forked repositories.'"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("RestoreCache runs successfully if cache hit", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "RestoreCacheCacheHit.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(tr.invokedToolCount === 1, "should have run ArtifactTool once");
|
||||
assert(
|
||||
tr.ran(
|
||||
`/users/tmp/ArtifactTool.exe universal download --feed node-package-feed --service https://example.visualstudio.com/defaultcollection --package-name builddefinition1 --package-version 1.0.0-${process.platform}-${hash} --path /users/home/directory/tmp_cache --patvar UNIVERSAL_DOWNLOAD_PAT --verbosity verbose`
|
||||
),
|
||||
"it should have run ArtifactTool"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("ArtifactTool.exe output"),
|
||||
"should have ArtifactTool output"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(
|
||||
tr.stdOutContained("set CacheRestored=true"),
|
||||
"'CacheRestored' variable should be set to true"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`${process.platform}-${hash}=true`),
|
||||
"variable should be set to mark key as valid in build"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("RestoreCache runs successfully if cache miss", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "RestoreCacheCacheMiss.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(tr.invokedToolCount === 1, "should have run ArtifactTool once");
|
||||
assert(
|
||||
tr.ran(
|
||||
`/users/tmp/ArtifactTool.exe universal download --feed node-package-feed --service https://example.visualstudio.com/defaultcollection --package-name builddefinition1 --package-version 1.0.0-${process.platform}-${hash} --path /users/home/directory/tmp_cache --patvar UNIVERSAL_DOWNLOAD_PAT --verbosity verbose`
|
||||
),
|
||||
"it should have run ArtifactTool"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("ArtifactTool.exe output"),
|
||||
"should have ArtifactTool output"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`Cache miss: ${process.platform}-${hash}`),
|
||||
"should have output stating cache miss"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(
|
||||
tr.stdOutContained("set CacheRestored=false"),
|
||||
"'CacheRestored' variable should be set to false"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`${process.platform}-${hash}=false`),
|
||||
"variable should be set to mark key as valid in build"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("RestoreCache handles artifact permissions errors gracefully", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "RestoreCachePermissionsError.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(tr.invokedToolCount === 1, "should have run ArtifactTool once");
|
||||
assert(
|
||||
tr.ran(
|
||||
`/users/tmp/ArtifactTool.exe universal download --feed node-package-feed --service https://example.visualstudio.com/defaultcollection --package-name builddefinition1 --package-version 1.0.0-${process.platform}-${hash} --path /users/home/directory/tmp_cache --patvar UNIVERSAL_DOWNLOAD_PAT --verbosity verbose`
|
||||
),
|
||||
"it should have run ArtifactTool"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("ArtifactTool.exe output"),
|
||||
"should have ArtifactTool output"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`Cache miss: ${process.platform}-${hash}`) !== true,
|
||||
"should not have output stating cache miss"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(tr.warningIssues.length > 0, "should have permissions warnings");
|
||||
assert(
|
||||
tr.stdOutContained("warning;]Error: An unexpected error occurred while trying to download the package. Exit code(1) and error(An error occurred on the service. User lacks permission to complete this action.)"),
|
||||
"There should be a warning about permissions"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("warning;]Issue running universal packages tools"),
|
||||
"There should be a warning about universal packages tools"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("set CacheRestored=false") !== true,
|
||||
"'CacheRestored' variable should not be set"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`${process.platform}-${hash}=`) !== true,
|
||||
"variable should not be set to mark key as valid in build"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("RestoreCache handles artifact tool download issues gracefully", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "RestoreCacheArtifactToolErr.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(
|
||||
tr.stdOutContained("Error initializing artifact tool"),
|
||||
"should have error initializing artifact tool"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(tr.warningIssues.length > 0, "should have warnings");
|
||||
assert(
|
||||
tr.stdOutContained("set CacheRestored=") !== true,
|
||||
"'CacheRestored' variable should not be set"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`set ${process.platform}-${hash}=`) !== true,
|
||||
"variable should not be set to mark key as valid in build"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
describe("SaveCache tests", function() {
|
||||
before(function() { });
|
||||
|
||||
after(() => { });
|
||||
|
||||
it("SaveCache runs successfully with warnings if no key files are found", function(done: MochaDone) {
|
||||
const tp = path.join(__dirname, "SaveCacheNoKeyFiles.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert.equal(tr.succeeded, true, "should have succeeded");
|
||||
assert.equal(
|
||||
tr.warningIssues.length > 0,
|
||||
true,
|
||||
"should have warnings from key file"
|
||||
);
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("no key files matching:") >= 0,
|
||||
true,
|
||||
"should display 'no key files matching:'"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache runs successfully with warnings if no target folders are found", function(done: MochaDone) {
|
||||
const tp = path.join(__dirname, "SaveCacheNoTargetFolders.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert.equal(tr.succeeded, true, "should have succeeded");
|
||||
assert.equal(
|
||||
tr.warningIssues.length > 0,
|
||||
true,
|
||||
"should have warnings from target folder"
|
||||
);
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("no target folders matching:") >= 0,
|
||||
true,
|
||||
"should display 'no target folders matching:'"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache is skipped if no run from repository fork", function(done: MochaDone) {
|
||||
const tp = path.join(__dirname, "SaveCacheFromFork.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert.equal(tr.succeeded, true, "should have succeeded");
|
||||
assert.equal(tr.warningIssues.length, 0, "should have no warnings");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("result=Skipped;") >= 0,
|
||||
true,
|
||||
"task result should be: 'Skipped'"
|
||||
);
|
||||
assert.equal(
|
||||
tr.stdout.indexOf("Caches are not saved from forked repositories.") >= 0,
|
||||
true,
|
||||
"should display 'Caches are not saved from forked repositories.'"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache doesn't create archive if cache hit", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "SaveCacheCacheHit.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(
|
||||
tr.stdOutContained("Cache entry already exists for:"),
|
||||
"should have bailed out due to cache already present"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache doesn't create an archive if no matching hash", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "SaveCacheNoHashMatch.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(
|
||||
tr.stdOutContained("Not caching artifact produced during build:"),
|
||||
"should have bailed out due to no matching hash"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("result=Skipped;"),
|
||||
"task result should be: 'Skipped'"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache handles artifact tool download issues gracefully", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "SaveCacheArtifactToolErr.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(
|
||||
tr.stdOutContained("Error initializing artifact tool"),
|
||||
"should have error initializing artifact tool"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(tr.warningIssues.length > 0, "should have warnings");
|
||||
assert(
|
||||
tr.stdOutContained("set CacheRestored=") !== true,
|
||||
"'CacheRestored' variable should not be set"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained(`set ${process.platform}-${hash}=`) !== true,
|
||||
"variable should not be set to mark key as valid in build"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("Cache successfully saved") !== true,
|
||||
"should not have saved new cache entry"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache handles artifact permissions errors gracefully", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "SaveCachePermissionsError.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
assert(tr.warningIssues.length > 0, "should have warnings");
|
||||
assert(
|
||||
tr.stdOutContained("warning;]Issue saving package: Error: An unexpected error occurred while trying to push the package. Exit code(1) and error(An error occurred on the service. User lacks permission to complete this action.)"),
|
||||
"There should be a warning about permissions"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("warning;]Cache unsuccessfully saved. Find more information in logs above"),
|
||||
"There should be a warning about cache not being saved"
|
||||
);
|
||||
assert(
|
||||
tr.stdOutContained("Cache successfully saved") !== true,
|
||||
"should not have saved new cache entry"
|
||||
);
|
||||
|
||||
done();
|
||||
});
|
||||
|
||||
it("SaveCache creates an archive if cache miss", (done: MochaDone) => {
|
||||
const tp = path.join(__dirname, "SaveCacheCacheMiss.js");
|
||||
const tr: ttm.MockTestRunner = new ttm.MockTestRunner(tp);
|
||||
|
||||
tr.run();
|
||||
|
||||
assert(
|
||||
tr.stdOutContained("Cache successfully saved"),
|
||||
"should have saved new cache entry"
|
||||
);
|
||||
assert(tr.succeeded, "should have succeeded");
|
||||
assert.equal(tr.errorIssues.length, 0, "should have no errors");
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
The diff for this file is not shown because it is too large.
|
@@ -13,14 +13,16 @@
|
|||
},
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/mocha": "^5.2.5",
|
||||
"@types/mocha": "^5.2.6",
|
||||
"@types/node": "^10.12.18",
|
||||
"@types/q": "^1.5.1",
|
||||
"adm-zip": "^0.4.11",
|
||||
"azure-devops-node-api": "^7.0.0",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"azure-pipelines-tool-lib": "^0.12.0",
|
||||
"mocha": "^6.0.2",
|
||||
"packaging-common": "file:../../_build/Tasks/Common/packaging-common-1.0.1.tgz",
|
||||
"shelljs": "^0.8.3",
|
||||
"vso-node-api": "6.5.0",
|
||||
"vsts-task-lib": "2.6.0",
|
||||
"vsts-task-tool-lib": "0.4.1"
|
||||
"ts-node": "^8.0.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,7 +1,7 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as tl from 'vsts-task-lib';
|
||||
import * as tl from 'azure-pipelines-task-lib';
|
||||
import * as path from 'path';
|
||||
import { cacheUtilities } from 'packaging-common/cache/cacheUtilities';
|
||||
const cache = new cacheUtilities();
|
||||
|
|
|
@@ -1,7 +1,7 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as tl from 'vsts-task-lib';
|
||||
import * as tl from 'azure-pipelines-task-lib';
|
||||
import * as path from 'path';
|
||||
import { cacheUtilities } from 'packaging-common/cache/cacheUtilities';
|
||||
const cache = new cacheUtilities();
|
||||
|
|
|
@@ -9,7 +9,7 @@
|
|||
"version": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 9
|
||||
"Patch": 10
|
||||
},
|
||||
"instanceNameFormat": "Restore and save artifact based on: $(keyfile)",
|
||||
"inputs": [
|
||||
|
|
|
@@ -9,7 +9,7 @@
|
|||
"version": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 9
|
||||
"Patch": 10
|
||||
},
|
||||
"instanceNameFormat": "ms-resource:loc.instanceNameFormat",
|
||||
"inputs": [
|
||||
|
|
|
@@ -1,9 +1,11 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES6",
|
||||
"module": "commonjs"
|
||||
"module": "commonjs",
|
||||
"sourceMap": true
|
||||
},
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
],
|
||||
"types": ["mocha", "node"]
|
||||
}
|
|
@@ -27,15 +27,33 @@
|
|||
"resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.1.tgz",
|
||||
"integrity": "sha512-eqz8c/0kwNi/OEHQfvIuJVLTst3in0e7uTKeuY+WL/zfKn0xVujOTp42bS/vUUokhK5P2BppLd9JXMOMHcgbjA=="
|
||||
},
|
||||
"@types/semver": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz",
|
||||
"integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ=="
|
||||
},
|
||||
"@types/uuid": {
|
||||
"version": "3.4.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.4.tgz",
|
||||
"integrity": "sha512-tPIgT0GUmdJQNSHxp0X2jnpQfBSTfGxUMc/2CXBU2mnyTFVYVa2ojpoQ74w0U2yn2vw3jnC640+77lkFFpdVDw==",
|
||||
"requires": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"adm-zip": {
|
||||
"version": "0.4.13",
|
||||
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.13.tgz",
|
||||
"integrity": "sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw=="
|
||||
},
|
||||
"arg": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.0.tgz",
|
||||
"integrity": "sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg=="
|
||||
},
|
||||
"azure-devops-node-api": {
|
||||
"version": "6.6.3",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-6.6.3.tgz",
|
||||
"integrity": "sha512-94wSu4O6CSSXoqYWg7Rzt2/IqbW2xVNu2qOtx6e7lnXxnDOcAu4eRzi8tgVNHsXTIGOVEsTqgMvGvFThKr9Pig==",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-7.0.0.tgz",
|
||||
"integrity": "sha512-WXTqFDE2QhfKli1EkcaMbGYuDpOVcNoccnQBF/bZCkPZsogLJOnsZHO/BJnd2VrT+eSJtPoVcHWjKfE/Zcihew==",
|
||||
"requires": {
|
||||
"os": "0.1.1",
|
||||
"tunnel": "0.0.4",
|
||||
|
@@ -54,6 +72,51 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-task-lib": {
|
||||
"version": "2.8.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-2.8.0.tgz",
|
||||
"integrity": "sha512-PR8oap9z2j+o455W3PwAfB4SX1p4GdJc9OHQaQV0V+iQS1IBY6dVgcNSQMkHAXb0V1bbuLOFBLanXPe5eSgGTQ==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-tool-lib": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-tool-lib/-/azure-pipelines-tool-lib-0.12.0.tgz",
|
||||
"integrity": "sha512-JAlFvMTtEXISrnJY/kgq0LecLi089RqXRf/gMsXYbflmzszklkc+LUJpR0A7NDmJ+9/MWpKY/ZX+Q/zirYa7gw==",
|
||||
"requires": {
|
||||
"@types/semver": "^5.3.0",
|
||||
"@types/uuid": "^3.0.1",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "1.0.9",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.0.9.tgz",
|
||||
"integrity": "sha512-iOdwgmnP/tF6Qs+oY4iEtCf/3fnCDl7Gy9LGPJ4E3M4Wj3uaSko15FVwbsaBmnBqTJORnXBWVY5306D4HH8oiA==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
|
@@ -68,11 +131,21 @@
|
|||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"buffer-from": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||
},
|
||||
"diff": {
|
||||
"version": "3.5.0",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
|
||||
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA=="
|
||||
},
|
||||
"fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
|
@@ -116,17 +189,17 @@
|
|||
"integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw=="
|
||||
},
|
||||
"ip-address": {
|
||||
"version": "5.8.9",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-5.8.9.tgz",
|
||||
"integrity": "sha512-7ay355oMN34iXhET1BmCJVsHjOTSItEEIIpOs38qUC23AIhOy+xIPnkrTuEFjeLMrTJ7m8KMXWgWfy/2Vn9sDw==",
|
||||
"version": "5.9.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-5.9.0.tgz",
|
||||
"integrity": "sha512-+4yKpEyent8IpjuDQVkIpzIDbxSlCHTPdmaXCRLH0ttt3YsrbNxuZJ6h+1wLPx10T7gWsLN7M6BXIHV2vZNOGw==",
|
||||
"requires": {
|
||||
"jsbn": "1.1.0",
|
||||
"lodash.find": "^4.6.0",
|
||||
"lodash.max": "^4.0.1",
|
||||
"lodash.merge": "^4.6.0",
|
||||
"lodash.merge": "^4.6.1",
|
||||
"lodash.padstart": "^4.6.1",
|
||||
"lodash.repeat": "^4.1.0",
|
||||
"sprintf-js": "1.1.0"
|
||||
"sprintf-js": "1.1.1"
|
||||
}
|
||||
},
|
||||
"jsbn": {
|
||||
|
@@ -167,6 +240,11 @@
|
|||
"inherits": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"make-error": {
|
||||
"version": "1.3.5",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
|
||||
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g=="
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
|
@@ -197,17 +275,47 @@
|
|||
"version": "file:../../_build/Tasks/Common/packaging-common-1.0.1.tgz",
|
||||
"requires": {
|
||||
"@types/ltx": "^2.8.0",
|
||||
"@types/node": "^10.12.9",
|
||||
"@types/node": "^11.13.0",
|
||||
"adm-zip": "^0.4.11",
|
||||
"azure-devops-node-api": "^6.6.0",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"azure-pipelines-tool-lib": "^0.12.0",
|
||||
"ini": "^1.3.4",
|
||||
"ip-address": "^5.8.9",
|
||||
"ltx": "^2.6.2",
|
||||
"q": "^1.5.0",
|
||||
"shelljs": "^0.8.3",
|
||||
"typed-rest-client": "0.12.0",
|
||||
"vsts-task-lib": "2.6.0",
|
||||
"vsts-task-tool-lib": "0.4.1"
|
||||
"ts-node": "^8.0.3",
|
||||
"typed-rest-client": "0.12.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": {
|
||||
"version": "11.13.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-11.13.4.tgz",
|
||||
"integrity": "sha512-+rabAZZ3Yn7tF/XPGHupKIL5EcAbrLxnTr/hgQICxbeuAfWtT0UZSfULE+ndusckBItcv4o6ZeOJplQikVcLvQ=="
|
||||
},
|
||||
"azure-devops-node-api": {
|
||||
"version": "6.6.3",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-6.6.3.tgz",
|
||||
"integrity": "sha512-94wSu4O6CSSXoqYWg7Rzt2/IqbW2xVNu2qOtx6e7lnXxnDOcAu4eRzi8tgVNHsXTIGOVEsTqgMvGvFThKr9Pig==",
|
||||
"requires": {
|
||||
"os": "0.1.1",
|
||||
"tunnel": "0.0.4",
|
||||
"typed-rest-client": "1.0.9",
|
||||
"underscore": "1.8.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.0.9.tgz",
|
||||
"integrity": "sha512-iOdwgmnP/tF6Qs+oY4iEtCf/3fnCDl7Gy9LGPJ4E3M4Wj3uaSko15FVwbsaBmnBqTJORnXBWVY5306D4HH8oiA==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"path-is-absolute": {
|
||||
|
@@ -261,10 +369,36 @@
|
|||
"rechoir": "^0.6.2"
|
||||
}
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
|
||||
},
|
||||
"source-map-support": {
|
||||
"version": "0.5.12",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.12.tgz",
|
||||
"integrity": "sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==",
|
||||
"requires": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"sprintf-js": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.0.tgz",
|
||||
"integrity": "sha1-z/yvcC2vZeo5u04PorKZzsGhvkY="
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.1.tgz",
|
||||
"integrity": "sha1-Nr54Mgr+WAH2zqPueLblqrlA6gw="
|
||||
},
|
||||
"ts-node": {
|
||||
"version": "8.0.3",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.0.3.tgz",
|
||||
"integrity": "sha512-2qayBA4vdtVRuDo11DEFSsD/SFsBXQBRZZhbRGSIkmYmVkWjULn/GGMdG10KVqkaGndljfaTD8dKjWgcejO8YA==",
|
||||
"requires": {
|
||||
"arg": "^4.1.0",
|
||||
"diff": "^3.1.0",
|
||||
"make-error": "^1.1.1",
|
||||
"source-map-support": "^0.5.6",
|
||||
"yn": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"tunnel": {
|
||||
"version": "0.0.4",
|
||||
|
@@ -290,63 +424,15 @@
|
|||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
||||
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
|
||||
},
|
||||
"vso-node-api": {
|
||||
"version": "6.5.0",
|
||||
"resolved": "https://registry.npmjs.org/vso-node-api/-/vso-node-api-6.5.0.tgz",
|
||||
"integrity": "sha512-hFjPLMJkq02zF8U+LhZ4airH0ivaiKzGdlNAQlYFB3lWuGH/UANUrl63DVPUQOyGw+7ZNQ+ufM44T6mWN92xyg==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"typed-rest-client": "^0.12.0",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
},
|
||||
"vsts-task-lib": {
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-lib/-/vsts-task-lib-2.6.0.tgz",
|
||||
"integrity": "sha512-ja2qX4BIUvswcNbGtIoGo1SM5mRVc3Yaf7oM4oY64bNHs04chKfvH6f3cDDG0pd44OrZIGQE9LgECzeau6z2wA==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"vsts-task-tool-lib": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-tool-lib/-/vsts-task-tool-lib-0.4.1.tgz",
|
||||
"integrity": "sha1-mYLTv14YS95SqpdCGJROEGJzRWU=",
|
||||
"requires": {
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "^0.11.0",
|
||||
"uuid": "^3.0.1",
|
||||
"vsts-task-lib": "^2.0.7"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "0.11.0",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-0.11.0.tgz",
|
||||
"integrity": "sha1-DvQTUtYo7i4IePtYpniRZF9qG0E=",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
|
||||
},
|
||||
"yn": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.0.tgz",
|
||||
"integrity": "sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg=="
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -17,10 +17,10 @@
|
|||
"@types/node": "^10.12.18",
|
||||
"@types/q": "^1.5.1",
|
||||
"adm-zip": "^0.4.11",
|
||||
"azure-devops-node-api": "^7.0.0",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"azure-pipelines-tool-lib": "^0.12.0",
|
||||
"packaging-common": "file:../../_build/Tasks/Common/packaging-common-1.0.1.tgz",
|
||||
"shelljs": "^0.8.3",
|
||||
"vso-node-api": "6.5.0",
|
||||
"vsts-task-lib": "2.6.0",
|
||||
"vsts-task-tool-lib": "0.4.1"
|
||||
"shelljs": "^0.8.3"
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,7 +1,7 @@
|
|||
// Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
// Licensed under the MIT License.
|
||||
|
||||
import * as tl from 'vsts-task-lib';
|
||||
import * as tl from 'azure-pipelines-task-lib';
|
||||
import * as path from 'path';
|
||||
import { cacheUtilities } from 'packaging-common/cache/cacheUtilities';
|
||||
const cache = new cacheUtilities();
|
||||
|
|
|
@@ -9,7 +9,7 @@
|
|||
"version": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 9
|
||||
"Patch": 10
|
||||
},
|
||||
"instanceNameFormat": "Restore artifact based on: $(keyfile)",
|
||||
"inputs": [
|
||||
|
|
|
@@ -9,7 +9,7 @@
|
|||
"version": {
|
||||
"Major": 1,
|
||||
"Minor": 0,
|
||||
"Patch": 9
|
||||
"Patch": 10
|
||||
},
|
||||
"instanceNameFormat": "ms-resource:loc.instanceNameFormat",
|
||||
"inputs": [
|
||||
|
|
|
@@ -1,9 +1,11 @@
|
|||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES6",
|
||||
"module": "commonjs"
|
||||
"module": "commonjs",
|
||||
"sourceMap": true
|
||||
},
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
],
|
||||
"types": ["mocha", "node"]
|
||||
}
|
|
@@ -27,15 +27,33 @@
|
|||
"resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.1.tgz",
|
||||
"integrity": "sha512-eqz8c/0kwNi/OEHQfvIuJVLTst3in0e7uTKeuY+WL/zfKn0xVujOTp42bS/vUUokhK5P2BppLd9JXMOMHcgbjA=="
|
||||
},
|
||||
"@types/semver": {
|
||||
"version": "5.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/semver/-/semver-5.5.0.tgz",
|
||||
"integrity": "sha512-41qEJgBH/TWgo5NFSvBCJ1qkoi3Q6ONSF2avrHq1LVEZfYpdHmj0y9SuTK+u9ZhG1sYQKBL1AWXKyLWP4RaUoQ=="
|
||||
},
|
||||
"@types/uuid": {
|
||||
"version": "3.4.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-3.4.4.tgz",
|
||||
"integrity": "sha512-tPIgT0GUmdJQNSHxp0X2jnpQfBSTfGxUMc/2CXBU2mnyTFVYVa2ojpoQ74w0U2yn2vw3jnC640+77lkFFpdVDw==",
|
||||
"requires": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"adm-zip": {
|
||||
"version": "0.4.13",
|
||||
"resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.4.13.tgz",
|
||||
"integrity": "sha512-fERNJX8sOXfel6qCBCMPvZLzENBEhZTzKqg6vrOW5pvoEaQuJhRU4ndTAh6lHOxn1I6jnz2NHra56ZODM751uw=="
|
||||
},
|
||||
"arg": {
|
||||
"version": "4.1.0",
|
||||
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.0.tgz",
|
||||
"integrity": "sha512-ZWc51jO3qegGkVh8Hwpv636EkbesNV5ZNQPCtRa+0qytRYPEs9IYT9qITY9buezqUH5uqyzlWLcufrzU2rffdg=="
|
||||
},
|
||||
"azure-devops-node-api": {
|
||||
"version": "6.6.3",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-6.6.3.tgz",
|
||||
"integrity": "sha512-94wSu4O6CSSXoqYWg7Rzt2/IqbW2xVNu2qOtx6e7lnXxnDOcAu4eRzi8tgVNHsXTIGOVEsTqgMvGvFThKr9Pig==",
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-7.0.0.tgz",
|
||||
"integrity": "sha512-WXTqFDE2QhfKli1EkcaMbGYuDpOVcNoccnQBF/bZCkPZsogLJOnsZHO/BJnd2VrT+eSJtPoVcHWjKfE/Zcihew==",
|
||||
"requires": {
|
||||
"os": "0.1.1",
|
||||
"tunnel": "0.0.4",
|
||||
|
@@ -54,6 +72,51 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-task-lib": {
|
||||
"version": "2.8.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-2.8.0.tgz",
|
||||
"integrity": "sha512-PR8oap9z2j+o455W3PwAfB4SX1p4GdJc9OHQaQV0V+iQS1IBY6dVgcNSQMkHAXb0V1bbuLOFBLanXPe5eSgGTQ==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"azure-pipelines-tool-lib": {
|
||||
"version": "0.12.0",
|
||||
"resolved": "https://registry.npmjs.org/azure-pipelines-tool-lib/-/azure-pipelines-tool-lib-0.12.0.tgz",
|
||||
"integrity": "sha512-JAlFvMTtEXISrnJY/kgq0LecLi089RqXRf/gMsXYbflmzszklkc+LUJpR0A7NDmJ+9/MWpKY/ZX+Q/zirYa7gw==",
|
||||
"requires": {
|
||||
"@types/semver": "^5.3.0",
|
||||
"@types/uuid": "^3.0.1",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "1.0.9",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.0.9.tgz",
|
||||
"integrity": "sha512-iOdwgmnP/tF6Qs+oY4iEtCf/3fnCDl7Gy9LGPJ4E3M4Wj3uaSko15FVwbsaBmnBqTJORnXBWVY5306D4HH8oiA==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"balanced-match": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
|
@@ -68,11 +131,21 @@
|
|||
"concat-map": "0.0.1"
|
||||
}
|
||||
},
|
||||
"buffer-from": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz",
|
||||
"integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A=="
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
|
||||
},
|
||||
"diff": {
|
||||
"version": "3.5.0",
|
||||
"resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz",
|
||||
"integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA=="
|
||||
},
|
||||
"fs.realpath": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
|
||||
|
@@ -116,17 +189,17 @@
|
|||
"integrity": "sha512-mT34yGKMNceBQUoVn7iCDKDntA7SC6gycMAWzGx1z/CMCTV7b2AAtXlo3nRyHZ1FelRkQbQjprHSYGwzLtkVbw=="
|
||||
},
|
||||
"ip-address": {
|
||||
"version": "5.8.9",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-5.8.9.tgz",
|
||||
"integrity": "sha512-7ay355oMN34iXhET1BmCJVsHjOTSItEEIIpOs38qUC23AIhOy+xIPnkrTuEFjeLMrTJ7m8KMXWgWfy/2Vn9sDw==",
|
||||
"version": "5.9.0",
|
||||
"resolved": "https://registry.npmjs.org/ip-address/-/ip-address-5.9.0.tgz",
|
||||
"integrity": "sha512-+4yKpEyent8IpjuDQVkIpzIDbxSlCHTPdmaXCRLH0ttt3YsrbNxuZJ6h+1wLPx10T7gWsLN7M6BXIHV2vZNOGw==",
|
||||
"requires": {
|
||||
"jsbn": "1.1.0",
|
||||
"lodash.find": "^4.6.0",
|
||||
"lodash.max": "^4.0.1",
|
||||
"lodash.merge": "^4.6.0",
|
||||
"lodash.merge": "^4.6.1",
|
||||
"lodash.padstart": "^4.6.1",
|
||||
"lodash.repeat": "^4.1.0",
|
||||
"sprintf-js": "1.1.0"
|
||||
"sprintf-js": "1.1.1"
|
||||
}
|
||||
},
|
||||
"jsbn": {
|
||||
|
@@ -167,6 +240,11 @@
|
|||
"inherits": "^2.0.1"
|
||||
}
|
||||
},
|
||||
"make-error": {
|
||||
"version": "1.3.5",
|
||||
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.5.tgz",
|
||||
"integrity": "sha512-c3sIjNUow0+8swNwVpqoH4YCShKNFkMaw6oH1mNS2haDZQqkeZFlHS3dhoeEbKKmJB4vXpJucU6oH75aDYeE9g=="
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
|
@@ -197,17 +275,47 @@
|
|||
"version": "file:../../_build/Tasks/Common/packaging-common-1.0.1.tgz",
|
||||
"requires": {
|
||||
"@types/ltx": "^2.8.0",
|
||||
"@types/node": "^10.12.9",
|
||||
"@types/node": "^11.13.0",
|
||||
"adm-zip": "^0.4.11",
|
||||
"azure-devops-node-api": "^6.6.0",
|
||||
"azure-pipelines-task-lib": "^2.8.0",
|
||||
"azure-pipelines-tool-lib": "^0.12.0",
|
||||
"ini": "^1.3.4",
|
||||
"ip-address": "^5.8.9",
|
||||
"ltx": "^2.6.2",
|
||||
"q": "^1.5.0",
|
||||
"shelljs": "^0.8.3",
|
||||
"typed-rest-client": "0.12.0",
|
||||
"vsts-task-lib": "2.6.0",
|
||||
"vsts-task-tool-lib": "0.4.1"
|
||||
"ts-node": "^8.0.3",
|
||||
"typed-rest-client": "0.12.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": {
|
||||
"version": "11.13.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-11.13.4.tgz",
|
||||
"integrity": "sha512-+rabAZZ3Yn7tF/XPGHupKIL5EcAbrLxnTr/hgQICxbeuAfWtT0UZSfULE+ndusckBItcv4o6ZeOJplQikVcLvQ=="
|
||||
},
|
||||
"azure-devops-node-api": {
|
||||
"version": "6.6.3",
|
||||
"resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-6.6.3.tgz",
|
||||
"integrity": "sha512-94wSu4O6CSSXoqYWg7Rzt2/IqbW2xVNu2qOtx6e7lnXxnDOcAu4eRzi8tgVNHsXTIGOVEsTqgMvGvFThKr9Pig==",
|
||||
"requires": {
|
||||
"os": "0.1.1",
|
||||
"tunnel": "0.0.4",
|
||||
"typed-rest-client": "1.0.9",
|
||||
"underscore": "1.8.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "1.0.9",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.0.9.tgz",
|
||||
"integrity": "sha512-iOdwgmnP/tF6Qs+oY4iEtCf/3fnCDl7Gy9LGPJ4E3M4Wj3uaSko15FVwbsaBmnBqTJORnXBWVY5306D4HH8oiA==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"path-is-absolute": {
|
||||
|
@ -261,10 +369,36 @@
|
|||
"rechoir": "^0.6.2"
|
||||
}
|
||||
},
|
||||
"source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
|
||||
},
|
||||
"source-map-support": {
|
||||
"version": "0.5.12",
|
||||
"resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.12.tgz",
|
||||
"integrity": "sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==",
|
||||
"requires": {
|
||||
"buffer-from": "^1.0.0",
|
||||
"source-map": "^0.6.0"
|
||||
}
|
||||
},
|
||||
"sprintf-js": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.0.tgz",
|
||||
"integrity": "sha1-z/yvcC2vZeo5u04PorKZzsGhvkY="
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.1.tgz",
|
||||
"integrity": "sha1-Nr54Mgr+WAH2zqPueLblqrlA6gw="
|
||||
},
|
||||
"ts-node": {
|
||||
"version": "8.0.3",
|
||||
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-8.0.3.tgz",
|
||||
"integrity": "sha512-2qayBA4vdtVRuDo11DEFSsD/SFsBXQBRZZhbRGSIkmYmVkWjULn/GGMdG10KVqkaGndljfaTD8dKjWgcejO8YA==",
|
||||
"requires": {
|
||||
"arg": "^4.1.0",
|
||||
"diff": "^3.1.0",
|
||||
"make-error": "^1.1.1",
|
||||
"source-map-support": "^0.5.6",
|
||||
"yn": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"tunnel": {
|
||||
"version": "0.0.4",
|
||||
|
@ -290,63 +424,15 @@
|
|||
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz",
|
||||
"integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA=="
|
||||
},
|
||||
"vso-node-api": {
|
||||
"version": "6.5.0",
|
||||
"resolved": "https://registry.npmjs.org/vso-node-api/-/vso-node-api-6.5.0.tgz",
|
||||
"integrity": "sha512-hFjPLMJkq02zF8U+LhZ4airH0ivaiKzGdlNAQlYFB3lWuGH/UANUrl63DVPUQOyGw+7ZNQ+ufM44T6mWN92xyg==",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"typed-rest-client": "^0.12.0",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
},
|
||||
"vsts-task-lib": {
|
||||
"version": "2.6.0",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-lib/-/vsts-task-lib-2.6.0.tgz",
|
||||
"integrity": "sha512-ja2qX4BIUvswcNbGtIoGo1SM5mRVc3Yaf7oM4oY64bNHs04chKfvH6f3cDDG0pd44OrZIGQE9LgECzeau6z2wA==",
|
||||
"requires": {
|
||||
"minimatch": "3.0.4",
|
||||
"mockery": "^1.7.0",
|
||||
"q": "^1.1.2",
|
||||
"semver": "^5.1.0",
|
||||
"shelljs": "^0.3.0",
|
||||
"uuid": "^3.0.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"shelljs": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.3.0.tgz",
|
||||
"integrity": "sha1-NZbmMHp4FUT1kfN9phg2DzHbV7E="
|
||||
}
|
||||
}
|
||||
},
|
||||
"vsts-task-tool-lib": {
|
||||
"version": "0.4.1",
|
||||
"resolved": "https://registry.npmjs.org/vsts-task-tool-lib/-/vsts-task-tool-lib-0.4.1.tgz",
|
||||
"integrity": "sha1-mYLTv14YS95SqpdCGJROEGJzRWU=",
|
||||
"requires": {
|
||||
"semver": "^5.3.0",
|
||||
"semver-compare": "^1.0.0",
|
||||
"typed-rest-client": "^0.11.0",
|
||||
"uuid": "^3.0.1",
|
||||
"vsts-task-lib": "^2.0.7"
|
||||
},
|
||||
"dependencies": {
|
||||
"typed-rest-client": {
|
||||
"version": "0.11.0",
|
||||
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-0.11.0.tgz",
|
||||
"integrity": "sha1-DvQTUtYo7i4IePtYpniRZF9qG0E=",
|
||||
"requires": {
|
||||
"tunnel": "0.0.4",
|
||||
"underscore": "1.8.3"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"wrappy": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8="
|
||||
},
|
||||
"yn": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.0.tgz",
|
||||
"integrity": "sha512-kKfnnYkbTfrAdd0xICNFw7Atm8nKpLcLv9AZGEt+kczL/WQVai4e2V6ZN8U/O+iI6WrNuJjNNOyu4zfhl9D3Hg=="
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -14,10 +14,10 @@
"@types/node": "^10.12.18",
"@types/q": "^1.5.1",
"adm-zip": "^0.4.11",
"azure-devops-node-api": "^7.0.0",
"azure-pipelines-task-lib": "^2.8.0",
"azure-pipelines-tool-lib": "^0.12.0",
"packaging-common": "file:../../_build/Tasks/Common/packaging-common-1.0.1.tgz",
"shelljs": "^0.8.3",
"vso-node-api": "6.5.0",
"vsts-task-lib": "2.6.0",
"vsts-task-tool-lib": "0.4.1"
"shelljs": "^0.8.3"
}
}
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

import * as tl from 'vsts-task-lib';
import * as tl from 'azure-pipelines-task-lib';
import * as path from 'path';
import { cacheUtilities } from 'packaging-common/cache/cacheUtilities';
const cache = new cacheUtilities();
@@ -9,7 +9,7 @@
"version": {
"Major": 1,
"Minor": 0,
"Patch": 9
"Patch": 10
},
"instanceNameFormat": "Save artifact based on: $(keyfile)",
"inputs": [
@@ -9,7 +9,7 @@
"version": {
"Major": 1,
"Minor": 0,
"Patch": 9
"Patch": 10
},
"instanceNameFormat": "ms-resource:loc.instanceNameFormat",
"inputs": [
@@ -1,9 +1,11 @@
{
"compilerOptions": {
"target": "ES6",
"module": "commonjs"
"module": "commonjs",
"sourceMap": true
},
"exclude": [
"node_modules"
]
],
"types": ["mocha", "node"]
}
@@ -1,5 +1,5 @@
resources:
- repo: self
- repo: self

name: $(Version)$(Rev:.r)

@@ -7,47 +7,84 @@ trigger:
batch: true
branches:
include:
- 'master'
- "master"

pr:
autoCancel: true
branches:
include:
- 'master'
- "master"

jobs:
- job: Extension
pool:
vmImage: macOS 10.13
demands:
- npm
- node.js
- job: BuildAndTest
pool:
vmImage: macOS 10.13
demands:
- npm
- node.js

steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"

- task: Npm@1
displayName: 'npm install'
inputs:
verbose: false
- task: Npm@1
displayName: "npm install"
inputs:
verbose: false

- task: Gulp@0
displayName: 'gulp build'
- task: Gulp@0
displayName: 'gulp build'
inputs:
targets: build

- task: ms-devlabs.vsts-developer-tools-build-tasks.package-extension-build-task.PackageVSTSExtension@1
displayName: 'Package Extension'
inputs:
extensionVersion: '$(Build.BuildNumber)'
extensionVisibility: public
updateTasksVersion: false
- task: Gulp@0
displayName: 'gulp test'
inputs:
targets: test
arguments: '--testResults=TESTRESULTS.xml'

- task: CopyFiles@2
displayName: 'Copy Files to staging'
inputs:
Contents: '**/*.vsix'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishTestResults@2
displayName: 'Publish Test Results **/TESTRESULTS.xml'
inputs:
testResultsFiles: '**/TESTRESULTS.xml'
condition: succeededOrFailed()

- job: Package
dependsOn: BuildAndTest
pool:
vmImage: VS2017-Win2016
demands:
- npm
- node.js

- task: PublishBuildArtifacts@1
displayName: 'Publish build artifacts'
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"

- task: Npm@1
displayName: "npm install"
inputs:
verbose: false

- task: Gulp@0
displayName: 'gulp build'
inputs:
targets: build

- task: ms-devlabs.vsts-developer-tools-build-tasks.package-extension-build-task.PackageVSTSExtension@1
displayName: "Package Extension"
inputs:
extensionVersion: "$(Build.BuildNumber)"
extensionVisibility: public
updateTasksVersion: false

- task: CopyFiles@2
displayName: "Copy Files to staging"
inputs:
Contents: "**/*.vsix"
TargetFolder: "$(Build.ArtifactStagingDirectory)"

- task: PublishBuildArtifacts@1
displayName: "Publish build artifacts"
gulpfile.js (56 lines changed)
@@ -1,28 +1,40 @@
var gulp = require('gulp');
var gutil = require('gulp-util');
var child_process = require('child_process');
var process = require('process');
var gulp = require("gulp");
var gutil = require("gulp-util");
var child_process = require("child_process");
var process = require("process");

function make (target, cb) {
var cl = ('node make.js ' + target + ' ' + process.argv.slice(3).join(' ')).trim();
console.log('------------------------------------------------------------');
console.log('> ' + cl);
console.log('------------------------------------------------------------');
try {
child_process.execSync(cl, { cwd: __dirname, stdio: 'inherit' });
}
catch (err) {
var msg = err.output ? err.output.toString() : err.message;
console.error(msg);
cb(new gutil.PluginError(msg));
return false;
}
function make(target, cb) {
var cl = (
"node make.js " +
target +
" " +
process.argv.slice(3).join(" ")
).trim();
console.log("------------------------------------------------------------");
console.log("> " + cl);
console.log("------------------------------------------------------------");
try {
child_process.execSync(cl, { cwd: __dirname, stdio: "inherit" });
} catch (err) {
var msg = err.output ? err.output.toString() : err.message;
console.error(msg);
cb(new gutil.PluginError(msg));
return false;
}

return true;
return true;
}

gulp.task('build', function (cb) {
make('build', cb);
gulp.task("build", function(cb) {
make("build", cb);
});

gulp.task('default', ['build']);
gulp.task("test", function(cb) {
make("test", cb);
});

gulp.task("clean", function(cb) {
make("clean", cb);
});

gulp.task("default", ["clean", "build", "test"]);
make-util.js (2859 lines changed): diff not shown because of its size.
make.js (457 lines changed)
@@ -1,14 +1,7 @@
// parse command line options
var minimist = require('minimist');
var minimist = require("minimist");
var mopts = {
string: [
'node',
'runner',
'server',
'suite',
'task',
'version'
]
string: ["node", "runner", "server", "suite", "task", "version", "testResults"]
};
var options = minimist(process.argv, mopts);

@@ -17,12 +10,12 @@ var options = minimist(process.argv, mopts);
process.argv = options._;

// modules
var make = require('shelljs/make');
var fs = require('fs');
var os = require('os');
var path = require('path');
var semver = require('semver');
var util = require('./make-util');
var make = require("shelljs/make");
var fs = require("fs");
var os = require("os");
var path = require("path");
var semver = require("semver");
var util = require("./make-util");

// util functions
var cd = util.cd;

@@ -48,49 +41,54 @@ var createYamlSnippetFile = util.createYamlSnippetFile;
var createMarkdownDocFile = util.createMarkdownDocFile;

// global paths
var buildPath = path.join(__dirname, '_build', 'Tasks');
var commonPath = path.join(__dirname, '_build', 'Tasks', 'Common');
var buildPath = path.join(__dirname, "_build", "Tasks");
var buildTestsPath = path.join(__dirname, "_build", "Tests");
var commonPath = path.join(__dirname, "_build", "Tasks", "Common");

// node min version
var minNodeVer = '6.10.3';
var minNodeVer = "6.10.3";
if (semver.lt(process.versions.node, minNodeVer)) {
fail('requires node >= ' + minNodeVer + '. installed: ' + process.versions.node);
fail(
"requires node >= " + minNodeVer + ". installed: " + process.versions.node
);
}

// add node modules .bin to the path so we can dictate version of tsc etc...
var binPath = path.join(__dirname, 'node_modules', '.bin');
if (!test('-d', binPath)) {
fail('node modules bin not found. ensure npm install has been run.');
var binPath = path.join(__dirname, "node_modules", ".bin");
if (!test("-d", binPath)) {
fail("node modules bin not found. ensure npm install has been run.");
}
addPath(binPath);

// resolve list of tasks
var taskList;
if (options.task) {
// find using --task parameter
taskList = matchFind(options.task, path.join(__dirname, 'Tasks'), { noRecurse: true, matchBase: true })
.map(function (item) {
return path.basename(item);
});
if (!taskList.length) {
fail('Unable to find any tasks matching pattern ' + options.task);
}
}
else {
// load the default list
taskList = fileToJson(path.join(__dirname, 'make-options.json')).tasks;
// find using --task parameter
taskList = matchFind(options.task, path.join(__dirname, "Tasks"), {
noRecurse: true,
matchBase: true
}).map(function(item) {
return path.basename(item);
});
if (!taskList.length) {
fail("Unable to find any tasks matching pattern " + options.task);
}
} else {
// load the default list
taskList = fileToJson(path.join(__dirname, "make-options.json")).tasks;
}

// set the runner options. should either be empty or a comma delimited list of test runners.
// for example: ts OR ts,ps
//
// note, currently the ts runner igores this setting and will always run.
process.env['TASK_TEST_RUNNER'] = options.runner || '';
process.env["TASK_TEST_RUNNER"] = options.runner || "";

target.clean = function () {
rm('-Rf', path.join(__dirname, '_build'));
mkdir('-p', buildPath);
rm('-Rf', path.join(__dirname, '_test'));
target.clean = function() {
rm("-Rf", path.join(__dirname, "_build"));
// mkdir("-p", buildPath);
rm("-Rf", path.join(__dirname, "_test"));
rm("-Rf", path.join(__dirname, "_testresults"));
};

//

@@ -99,201 +97,260 @@ target.clean = function () {
// ex: node make.js gendocs --task ShellScript
//
target.gendocs = function() {
var docsDir = path.join(__dirname, '_gendocs');
rm('-Rf', docsDir);
mkdir('-p', docsDir);
console.log();
console.log('> generating docs');
var docsDir = path.join(__dirname, "_gendocs");
rm("-Rf", docsDir);
mkdir("-p", docsDir);
console.log();
console.log("> generating docs");

taskList.forEach(function(taskName) {
var taskPath = path.join(__dirname, 'Tasks', taskName);
ensureExists(taskPath);
taskList.forEach(function(taskName) {
var taskPath = path.join(__dirname, "Tasks", taskName);
ensureExists(taskPath);

// load the task.json
var taskJsonPath = path.join(taskPath, 'task.json');
if (test('-f', taskJsonPath)) {
var taskDef = fileToJson(taskJsonPath);
validateTask(taskDef);
// load the task.json
var taskJsonPath = path.join(taskPath, "task.json");
if (test("-f", taskJsonPath)) {
var taskDef = fileToJson(taskJsonPath);
validateTask(taskDef);

// create YAML snippet Markdown
var yamlOutputFilename = taskName + '.md';
createYamlSnippetFile(taskDef, docsDir, yamlOutputFilename);
// create YAML snippet Markdown
var yamlOutputFilename = taskName + ".md";
createYamlSnippetFile(taskDef, docsDir, yamlOutputFilename);

// create Markdown documentation file
var mdDocOutputFilename = taskName + '.md';
createMarkdownDocFile(taskDef, taskJsonPath, docsDir, mdDocOutputFilename);
}
});
// create Markdown documentation file
var mdDocOutputFilename = taskName + ".md";
createMarkdownDocFile(
taskDef,
taskJsonPath,
docsDir,
mdDocOutputFilename
);
}
});

banner('Generating docs successful', true);
}
banner("Generating docs successful", true);
};

//
// ex: node make.js build
// ex: node make.js build --task ShellScript
//
target.build = function() {
target.clean();
target.clean();

ensureTool('tsc', '--version', 'Version 2.3.4');
ensureTool('npm', '--version', function (output) {
if (semver.lt(output, '5.6.0')) {
fail('Expected 5.6.0 or higher. To fix, run: npm install -g npm');
}
});
ensureTool("tsc", "--version", "Version 3.4.1");
ensureTool("npm", "--version", function(output) {
if (semver.lt(output, "5.6.0")) {
fail("Expected 5.6.0 or higher. To fix, run: npm install -g npm");
}
});

taskList.forEach(function(taskName) {
banner('Building: ' + taskName);
var taskPath = path.join(__dirname, 'Tasks', taskName);
ensureExists(taskPath);
taskList.forEach(function(taskName) {
banner("Building: " + taskName);
var taskPath = path.join(__dirname, "Tasks", taskName);
ensureExists(taskPath);

// load the task.json
var outDir;
var shouldBuildNode = test('-f', path.join(taskPath, 'tsconfig.json'));
var taskJsonPath = path.join(taskPath, 'task.json');
if (test('-f', taskJsonPath)) {
var taskDef = fileToJson(taskJsonPath);
validateTask(taskDef);
// load the task.json
var outDir;
var shouldBuildNode = test("-f", path.join(taskPath, "tsconfig.json"));
var taskJsonPath = path.join(taskPath, "task.json");
if (test("-f", taskJsonPath)) {
var taskDef = fileToJson(taskJsonPath);
validateTask(taskDef);

// fixup the outDir (required for relative pathing in legacy L0 tests)
outDir = path.join(buildPath, taskName);
// fixup the outDir (required for relative pathing in legacy L0 tests)
outDir = path.join(buildPath, taskName);

// create loc files
createTaskLocJson(taskPath);
createResjson(taskDef, taskPath);
// create loc files
createTaskLocJson(taskPath);
createResjson(taskDef, taskPath);

// determine the type of task
shouldBuildNode = shouldBuildNode || taskDef.execution.hasOwnProperty('Node');
}
else {
outDir = path.join(buildPath, path.basename(taskPath));
}
// determine the type of task
shouldBuildNode =
shouldBuildNode || taskDef.execution.hasOwnProperty("Node");
} else {
outDir = path.join(buildPath, path.basename(taskPath));
}

mkdir('-p', outDir);
mkdir("-p", outDir);

// get externals
var taskMakePath = path.join(taskPath, 'make.json');
var taskMake = test('-f', taskMakePath) ? fileToJson(taskMakePath) : {};
if (taskMake.hasOwnProperty('externals')) {
console.log('');
console.log('> getting task externals');
getExternals(taskMake.externals, outDir);
}
// get externals
var taskMakePath = path.join(taskPath, "make.json");
var taskMake = test("-f", taskMakePath) ? fileToJson(taskMakePath) : {};
if (taskMake.hasOwnProperty("externals")) {
console.log("");
console.log("> getting task externals");
getExternals(taskMake.externals, outDir);
}

//--------------------------------
// Common: build, copy, install
//--------------------------------
var commonPacks = [];
if (taskMake.hasOwnProperty('common')) {
var common = taskMake['common'];
//--------------------------------
// Common: build, copy, install
//--------------------------------
var commonPacks = [];
if (taskMake.hasOwnProperty("common")) {
var common = taskMake["common"];

common.forEach(function(mod) {
var modPath = path.join(taskPath, mod['module']);
var modName = path.basename(modPath);
var modOutDir = path.join(commonPath, modName);
common.forEach(function(mod) {
var modPath = path.join(taskPath, mod["module"]);
var modName = path.basename(modPath);
var modOutDir = path.join(commonPath, modName);

if (!test('-d', modOutDir)) {
banner('Building module ' + modPath, true);
if (!test("-d", modOutDir)) {
banner("Building module " + modPath, true);

mkdir('-p', modOutDir);
mkdir("-p", modOutDir);

// create loc files
var modJsonPath = path.join(modPath, 'module.json');
if (test('-f', modJsonPath)) {
createResjson(fileToJson(modJsonPath), modPath);
}
// create loc files
var modJsonPath = path.join(modPath, "module.json");
if (test("-f", modJsonPath)) {
createResjson(fileToJson(modJsonPath), modPath);
}

// npm install and compile
if ((mod.type === 'node' && mod.compile == true) || test('-f', path.join(modPath, 'tsconfig.json'))) {
buildNodeTask(modPath, modOutDir);
}
// npm install and compile
if (
(mod.type === "node" && mod.compile == true) ||
test("-f", path.join(modPath, "tsconfig.json"))
) {
buildNodeTask(modPath, modOutDir);
}

// copy default resources and any additional resources defined in the module's make.json
console.log();
console.log('> copying module resources');
var modMakePath = path.join(modPath, 'make.json');
var modMake = test('-f', modMakePath) ? fileToJson(modMakePath) : {};
copyTaskResources(modMake, modPath, modOutDir);
// copy default resources and any additional resources defined in the module's make.json
console.log();
console.log("> copying module resources");
var modMakePath = path.join(modPath, "make.json");
var modMake = test("-f", modMakePath) ? fileToJson(modMakePath) : {};
copyTaskResources(modMake, modPath, modOutDir);

// get externals
if (modMake.hasOwnProperty('externals')) {
console.log('');
console.log('> getting module externals');
getExternals(modMake.externals, modOutDir);
}
// get externals
if (modMake.hasOwnProperty("externals")) {
console.log("");
console.log("> getting module externals");
getExternals(modMake.externals, modOutDir);
}

if (mod.type === 'node' && mod.compile == true || test('-f', path.join(modPath, 'package.json'))) {
var commonPack = util.getCommonPackInfo(modOutDir);
if (
(mod.type === "node" && mod.compile == true) ||
test("-f", path.join(modPath, "package.json"))
) {
var commonPack = util.getCommonPackInfo(modOutDir);

// assert the pack file does not already exist (name should be unique)
if (test('-f', commonPack.packFilePath)) {
fail(`Pack file already exists: ${commonPack.packFilePath}`);
}

// pack the Node module. a pack file is required for dedupe.
// installing from a folder creates a symlink, and does not dedupe.
cd(path.dirname(modOutDir));
run(`npm pack ./${path.basename(modOutDir)}`);
}
}

// store the npm pack file info
if (mod.type === 'node' && mod.compile == true) {
commonPacks.push(util.getCommonPackInfo(modOutDir));
}
// copy ps module resources to the task output dir
else if (mod.type === 'ps') {
console.log();
console.log('> copying ps module to task');
var dest;
if (mod.hasOwnProperty('dest')) {
dest = path.join(outDir, mod.dest, modName);
}
else {
dest = path.join(outDir, 'ps_modules', modName);
}

matchCopy('!Tests', modOutDir, dest, { noRecurse: true, matchBase: true });
}
});

// npm install the common modules to the task dir
if (commonPacks.length) {
cd(taskPath);
var installPaths = commonPacks.map(function (commonPack) {
return `file:${path.relative(taskPath, commonPack.packFilePath)}`;
});
run(`npm install --save-exact ${installPaths.join(' ')}`);
// assert the pack file does not already exist (name should be unique)
if (test("-f", commonPack.packFilePath)) {
fail(`Pack file already exists: ${commonPack.packFilePath}`);
}

// pack the Node module. a pack file is required for dedupe.
// installing from a folder creates a symlink, and does not dedupe.
cd(path.dirname(modOutDir));
run(`npm pack ./${path.basename(modOutDir)}`);
}
}

// build Node task
if (shouldBuildNode) {
buildNodeTask(taskPath, outDir);
// store the npm pack file info
if (mod.type === "node" && mod.compile == true) {
commonPacks.push(util.getCommonPackInfo(modOutDir));
}
// copy ps module resources to the task output dir
else if (mod.type === "ps") {
console.log();
console.log("> copying ps module to task");
var dest;
if (mod.hasOwnProperty("dest")) {
dest = path.join(outDir, mod.dest, modName);
} else {
dest = path.join(outDir, "ps_modules", modName);
}

// remove the hashes for the common packages, they change every build
if (commonPacks.length) {
var lockFilePath = path.join(taskPath, 'package-lock.json');
if (!test('-f', lockFilePath)) {
lockFilePath = path.join(taskPath, 'npm-shrinkwrap.json');
}
var packageLock = fileToJson(lockFilePath);
Object.keys(packageLock.dependencies).forEach(function (dependencyName) {
commonPacks.forEach(function (commonPack) {
if (dependencyName == commonPack.packageName) {
delete packageLock.dependencies[dependencyName].integrity;
}
});
});
fs.writeFileSync(lockFilePath, JSON.stringify(packageLock, null, ' '));
matchCopy("!Tests", modOutDir, dest, {
noRecurse: true,
matchBase: true
});
}
});

// copy default resources and any additional resources defined in the task's make.json
console.log();
console.log('> copying task resources');
copyTaskResources(taskMake, taskPath, outDir);
});
// npm install the common modules to the task dir
if (commonPacks.length) {
cd(taskPath);
var installPaths = commonPacks.map(function(commonPack) {
return `file:${path.relative(taskPath, commonPack.packFilePath)}`;
});
run(`npm install --save-exact ${installPaths.join(" ")}`);
}
}

banner('Build successful', true);
}
// build Node task
if (shouldBuildNode) {
buildNodeTask(taskPath, outDir);
}

// remove the hashes for the common packages, they change every build
if (commonPacks.length) {
var lockFilePath = path.join(taskPath, "package-lock.json");
if (!test("-f", lockFilePath)) {
lockFilePath = path.join(taskPath, "npm-shrinkwrap.json");
}
var packageLock = fileToJson(lockFilePath);
Object.keys(packageLock.dependencies).forEach(function(dependencyName) {
commonPacks.forEach(function(commonPack) {
if (dependencyName == commonPack.packageName) {
delete packageLock.dependencies[dependencyName].integrity;
}
});
});
fs.writeFileSync(lockFilePath, JSON.stringify(packageLock, null, " "));
}

// copy default resources and any additional resources defined in the task's make.json
console.log();
console.log("> copying task resources");
copyTaskResources(taskMake, taskPath, outDir);
});

banner("Build successful", true);
};

//
// will run tests for the scope of tasks being built
// npm test
// node make.js test
// node make.js test --task ShellScript --suite L0
//
target.test = function() {
ensureTool("tsc", "--version", "Version 3.4.1");
ensureTool("mocha", "--version", "6.0.2");

// find the tests
var suiteType = options.suite || "_suite";
var taskType = options.task || "*";
var pattern1 = buildPath + "/" + taskType + "/Tests/" + suiteType + ".js";
var pattern2 =
buildPath + "/Common/" + taskType + "/Tests/" + suiteType + ".js";
var testsSpec = matchFind(pattern1, buildPath).concat(
matchFind(pattern2, buildPath)
);
// .concat(matchFind(pattern3, buildTestsPath, { noRecurse: true }));
if (!testsSpec.length && !process.env.TF_BUILD) {
fail(
`Unable to find tests using the following patterns: ${JSON.stringify([
pattern1,
pattern2,
pattern3
])}`
);
}

// setup the version of node to run the tests
util.installNode(options.node);

let testCmd = "";
if (options.testResults)
testCmd = ` --reporter mocha-junit-reporter --reporter-options mochaFile=${options.testResults}`;

run(
"mocha " +
testsSpec.join(
" "
) + testCmd,
/*inheritStreams:*/ true
);
};
(Diff not shown because of its size.)
package.json (15 lines changed)
@@ -4,7 +4,8 @@
"description": "Tasks for caching Azure Pipelines artifacts",
"main": "gulpfile.js",
"scripts": {
"build": "node make.js build"
"build": "node make.js build",
"test": "mocha --require ts-node/register Tasks/**/_suite.ts"
},
"repository": {
"type": "git",

@@ -23,20 +24,24 @@
},
"homepage": "https://github.com/Microsoft/azure-pipelines-artifact-caching-tasks",
"devDependencies": {
"gulp": "3.9.0",
"@types/mocha": "^5.2.6",
"@types/node": "^11.13.0",
"gulp": "^3.9.1",
"gulp-util": "3.0.4",
"markdown-toc": "^1.2.0",
"minimatch": "3.0.2",
"minimist": "1.1.1",
"mocha": "2.3.3",
"mocha-junit-reporter": "1.17.0",
"mocha": "^6.0.2",
"mocha-junit-reporter": "^1.18.0",
"node-uuid": "1.4.6",
"q": "1.4.1",
"semver": "4.3.3",
"shelljs": "^0.3.0",
"sync-request": "3.0.1",
"ts-node": "^8.0.3",
"typed-rest-client": "1.0.9",
"typescript": "2.3.4",
"typescript": "^3.4.1",
"typescript-tslint-plugin": "^0.3.1",
"validator": "3.33.0"
}
}
@@ -1,6 +1,14 @@
{
"compilerOptions": {
"target": "ES6",
"module": "commonjs"
}
"compilerOptions": {
"target": "ES6",
"module": "commonjs",
"declaration": true,
"sourceMap": true,
"plugins": [
{
"name": "typescript-tslint-plugin"
}
],
"types": ["mocha", "node"]
}
}
@@ -0,0 +1,68 @@
{
"defaultSeverity": "error",
"extends": [
"tslint:recommended"
],
"rules": {
"no-string-literal" : [
false
],
"indent": [
true,
"spaces"
],
"interface-name": [
true,
"never-prefix"
],
"variable-name": [
true,
"check-format",
"allow-leading-underscore"
],
"curly": [
true,
"ignore-same-line"
],
"member-access": [
true
],
"max-classes-per-file": false,
"no-empty": false,
"object-literal-sort-keys": false,
"no-unused-expression": true,
"arrow-parens": [
true,
"ban-single-arg-parens"
],
"quotemark": false,
"eofline": false,
"semicolon": [
true,
"always"
],
"no-console": false,
"class-name": true,
"prefer-const": true,
"import-spacing": true,
"ordered-imports": false,
"trailing-comma": [
true,
{
"multiline": {
"objects": "always",
"arrays": "always",
"functions": "ignore",
"typeLiterals": "ignore"
}
}
],
"max-line-length": false
},
"rulesDirectory": [],
"linterOptions": {
"exclude": [
"e2e/**/*"
]
}
}