Convert scripts/Gulpfile to checked mjs/cjs so they can run without compilation (#50988)
This commit is contained in:
Parent: dbeae5d943
Commit: ad56b5ca56
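The diff below applies the same conversion throughout: each build script moves from CommonJS that needed a build step to a type-checked .mjs (or .cjs, for the ESLint rules) file that Node can run directly, which is why the series(buildScripts, ...) wrappers disappear from the Gulpfile tasks. A minimal sketch of the pattern, using a hypothetical helper name (the real conversions further down, e.g. scripts/build/findUpDir.mjs, have the same shape):

// before: CommonJS .js, exported via `exports`, free to use __dirname
const { join } = require("path");
function rootRelative(name) {
    return join(__dirname, "..", name);
}
exports.rootRelative = rootRelative;

// after: checked ES module (.mjs), runnable as-is with `node`
// @ts-check
import { join, dirname } from "path";
import url from "url";
// __dirname does not exist in ES modules; rebuild it from import.meta.url
const __dirname = dirname(url.fileURLToPath(new URL(import.meta.url)));
export function rootRelative(name) {
    return join(__dirname, "..", name);
}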
@ -17,20 +17,8 @@ build.json
*.config
scripts/debug.bat
scripts/run.bat
scripts/word2md.js
scripts/buildProtocol.js
scripts/ior.js
scripts/configurePrerelease.js
scripts/open-user-pr.js
scripts/open-cherry-pick-pr.js
scripts/processDiagnosticMessages.d.ts
scripts/processDiagnosticMessages.js
scripts/produceLKG.js
scripts/importDefinitelyTypedTests/importDefinitelyTypedTests.js
scripts/generateLocalizedDiagnosticMessages.js
scripts/configureLanguageServiceBuild.js
scripts/*.js.map
scripts/typings/
scripts/**/*.js
scripts/**/*.js.map
coverage/
internal/
**/.DS_Store
@ -4,7 +4,8 @@
/lib/**
/src/lib/*.generated.d.ts
# Ignore all compiled script outputs
/scripts/*.js
/scripts/**/*.js
/scripts/**/*.d.*
# But, not the ones that are hand-written.
# TODO: remove once scripts are pure JS
!/scripts/browserIntegrationTest.js
@ -2,7 +2,7 @@ const fs = require("fs");
const path = require("path");

const rulesDir = path.join(__dirname, "scripts", "eslint", "rules");
const ext = ".js";
const ext = ".cjs";
const ruleFiles = fs.readdirSync(rulesDir).filter((p) => p.endsWith(ext));

module.exports = {
@ -12,17 +12,6 @@
"plugins": [
"@typescript-eslint", "jsdoc", "no-null", "import", "eslint-plugin-local"
],
"overrides": [
// By default, the ESLint CLI only looks at .js files. But, it will also look at
// any files which are referenced in an override config. Most users of typescript-eslint
// get this behavior by default by extending a recommended typescript-eslint config, which
// just so happens to override some core ESLint rules. We don't extend from any config, so
// explicitly reference TS files here so the CLI picks them up.
//
// ESLint in VS Code will lint any opened file (so long as it's not eslintignore'd), so
// that will work regardless of the below.
{ "files": ["*.ts", "*.mts", "*.cts", "*.mjs", "*.cjs"] }
],
"rules": {
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/array-type": "error",
@ -151,5 +140,31 @@
"no-prototype-builtins": "error",
"no-self-assign": "error",
"no-dupe-else-if": "error"
}
},
"overrides": [
// By default, the ESLint CLI only looks at .js files. But, it will also look at
// any files which are referenced in an override config. Most users of typescript-eslint
// get this behavior by default by extending a recommended typescript-eslint config, which
// just so happens to override some core ESLint rules. We don't extend from any config, so
// explicitly reference TS files here so the CLI picks them up.
//
// ESLint in VS Code will lint any opened file (so long as it's not eslintignore'd), so
// that will work regardless of the below.
//
// The same applies to mjs files; ESLint appears to not scan those either.
{ "files": ["*.ts", "*.mts", "*.cts", "*.mjs", "*.cjs"] },
{
"files": ["*.mjs", "*.mts"],
"rules": {
// These globals don't exist outside of CJS files.
"no-restricted-globals": ["error",
{ "name": "__filename" },
{ "name": "__dirname" },
{ "name": "require" },
{ "name": "module" },
{ "name": "exports" }
]
}
}
]
}
@ -67,3 +67,20 @@ jobs:
- name: Validate the browser can import TypeScript
run: gulp test-browser-integration

misc:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with:
node-version: "*"
check-latest: true
- run: npm ci

- name: Build scripts
run: gulp scripts

- name: ESLint tests
run: gulp run-eslint-rules-tests
@ -40,22 +40,8 @@ tests/cases/**/*.js.map
scripts/eslint/built/
scripts/debug.bat
scripts/run.bat
scripts/word2md.js
scripts/buildProtocol.js
scripts/ior.js
scripts/configurePrerelease.js
scripts/configureLanguageServiceBuild.js
scripts/open-user-pr.js
scripts/open-cherry-pick-pr.js
scripts/processDiagnosticMessages.d.ts
scripts/processDiagnosticMessages.js
scripts/produceLKG.js
scripts/importDefinitelyTypedTests/importDefinitelyTypedTests.js
scripts/generateLocalizedDiagnosticMessages.js
scripts/request-pr-review.js
scripts/errorCheck.js
scripts/*.js.map
scripts/typings/
scripts/**/*.js
scripts/**/*.js.map
coverage/
internal/
**/.DS_Store
@ -1,20 +1,22 @@
// @ts-check
const path = require("path");
const fs = require("fs");
const log = require("fancy-log");
const newer = require("gulp-newer");
const sourcemaps = require("gulp-sourcemaps");
const del = require("del");
const rename = require("gulp-rename");
const concat = require("gulp-concat");
const merge2 = require("merge2");
const { src, dest, task, parallel, series, watch } = require("gulp");
const { append, transform } = require("gulp-insert");
const { prependFile } = require("./scripts/build/prepend");
const { exec, readJson, needsUpdate, getDiffTool, getDirSize, rm } = require("./scripts/build/utils");
const { runConsoleTests, refBaseline, localBaseline, refRwcBaseline, localRwcBaseline } = require("./scripts/build/tests");
const { buildProject, cleanProject, watchProject } = require("./scripts/build/projects");
const cmdLineOptions = require("./scripts/build/options");
import path from "path";
import fs from "fs";
import log from "fancy-log";
import newer from "gulp-newer";
import sourcemaps from "gulp-sourcemaps";
import del from "del";
import rename from "gulp-rename";
import concat from "gulp-concat";
import merge2 from "merge2";
import gulp from "gulp";
import { append, transform } from "gulp-insert";
import { prependFile } from "./scripts/build/prepend.mjs";
import { exec, readJson, needsUpdate, getDiffTool, getDirSize, rm } from "./scripts/build/utils.mjs";
import { runConsoleTests, refBaseline, localBaseline, refRwcBaseline, localRwcBaseline } from "./scripts/build/tests.mjs";
import { buildProject, cleanProject, watchProject } from "./scripts/build/projects.mjs";
import cmdLineOptions from "./scripts/build/options.mjs";

const { src, dest, task, parallel, series, watch } = gulp;

const copyright = "CopyrightNotice.txt";
const cleanTasks = [];
@ -23,9 +25,6 @@ const buildScripts = () => buildProject("scripts");
task("scripts", buildScripts);
task("scripts").description = "Builds files in the 'scripts' folder.";

const cleanScripts = () => cleanProject("scripts");
cleanTasks.push(cleanScripts);

/** @type {{ libs: string[]; paths: Record<string, string | undefined>; }} */
const libraries = readJson("./src/lib/libs.json");
const libs = libraries.libs.map(lib => {
@ -56,10 +55,10 @@ const diagnosticMessagesJson = "src/compiler/diagnosticMessages.json";
const diagnosticMessagesGeneratedJson = "src/compiler/diagnosticMessages.generated.json";
const generateDiagnostics = async () => {
if (needsUpdate(diagnosticMessagesJson, [diagnosticMessagesGeneratedJson, diagnosticInformationMapTs])) {
await exec(process.execPath, ["scripts/processDiagnosticMessages.js", diagnosticMessagesJson]);
await exec(process.execPath, ["scripts/processDiagnosticMessages.mjs", diagnosticMessagesJson]);
}
};
task("generate-diagnostics", series(buildScripts, generateDiagnostics));
task("generate-diagnostics", generateDiagnostics);
task("generate-diagnostics").description = "Generates a diagnostic file in TypeScript based on an input JSON file";

const cleanDiagnostics = () => del([diagnosticInformationMapTs, diagnosticMessagesGeneratedJson]);
@ -88,7 +87,7 @@ const localizationTargets = ["cs", "de", "es", "fr", "it", "ja", "ko", "pl", "pt

const localize = async () => {
if (needsUpdate(diagnosticMessagesGeneratedJson, generatedLCGFile)) {
return exec(process.execPath, ["scripts/generateLocalizedDiagnosticMessages.js", "src/loc/lcl", "built/local", diagnosticMessagesGeneratedJson], { ignoreExitCode: true });
return exec(process.execPath, ["scripts/generateLocalizedDiagnosticMessages.mjs", "src/loc/lcl", "built/local", diagnosticMessagesGeneratedJson], { ignoreExitCode: true });
}
};

@ -97,7 +96,7 @@ const cleanDebugTools = () => cleanProject("src/debug");
cleanTasks.push(cleanDebugTools);

// Pre-build steps when targeting the LKG compiler
const lkgPreBuild = parallel(generateLibs, series(buildScripts, generateDiagnostics, buildDebugTools));
const lkgPreBuild = parallel(generateLibs, series(generateDiagnostics, buildDebugTools));

const buildTsc = () => buildProject("src/tsc");
task("tsc", series(lkgPreBuild, buildTsc));
@ -113,7 +112,7 @@ task("watch-tsc", series(lkgPreBuild, parallel(watchLib, watchDiagnostics, watch
task("watch-tsc").description = "Watch for changes and rebuild the command-line compiler only.";

// Pre-build steps when targeting the built/local compiler.
const localPreBuild = parallel(generateLibs, series(buildScripts, generateDiagnostics, buildDebugTools, buildTsc));
const localPreBuild = parallel(generateLibs, series(generateDiagnostics, buildDebugTools, buildTsc));

// Pre-build steps to use based on supplied options.
const preBuild = cmdLineOptions.lkg ? lkgPreBuild : localPreBuild;
@ -335,17 +334,8 @@ task("clean-tests").description = "Cleans the outputs for the test infrastructur

const watchTests = () => watchProject("src/testRunner", cmdLineOptions);

const buildEslintRules = () => buildProject("scripts/eslint");
task("build-eslint-rules", buildEslintRules);
task("build-eslint-rules").description = "Compiles eslint rules to js";

const cleanEslintRules = () => cleanProject("scripts/eslint");
cleanTasks.push(cleanEslintRules);
task("clean-eslint-rules", cleanEslintRules);
task("clean-eslint-rules").description = "Cleans the outputs for the eslint rules";

const runEslintRulesTests = () => runConsoleTests("scripts/eslint/built/tests", "mocha-fivemat-progress-reporter", /*runInParallel*/ false, /*watchMode*/ false);
task("run-eslint-rules-tests", series(buildEslintRules, runEslintRulesTests));
const runEslintRulesTests = () => runConsoleTests("scripts/eslint/tests", "mocha-fivemat-progress-reporter", /*runInParallel*/ false, /*watchMode*/ false);
task("run-eslint-rules-tests", runEslintRulesTests);
task("run-eslint-rules-tests").description = "Runs the eslint rule tests";

/** @type { (folder: string) => { (): Promise<any>; displayName?: string } } */
@ -459,8 +449,8 @@ task("runtests-parallel").flags = {
};


task("test-browser-integration", () => exec(process.execPath, ["scripts/browserIntegrationTest.js"]));
task("test-browser-integration").description = "Runs scripts/browserIntegrationTest.ts which tests that typescript.js loads in a browser";
task("test-browser-integration", () => exec(process.execPath, ["scripts/browserIntegrationTest.mjs"]));
task("test-browser-integration").description = "Runs scripts/browserIntegrationTest.mjs which tests that typescript.js loads in a browser";


task("diff", () => exec(getDiffTool(), [refBaseline, localBaseline], { ignoreExitCode: true, waitForExit: false }));
@ -493,13 +483,9 @@ const updateSublime = () => src(["built/local/tsserver.js", "built/local/tsserve
task("update-sublime", updateSublime);
task("update-sublime").description = "Updates the sublime plugin's tsserver";

const buildImportDefinitelyTypedTests = () => buildProject("scripts/importDefinitelyTypedTests");
const cleanImportDefinitelyTypedTests = () => cleanProject("scripts/importDefinitelyTypedTests");
cleanTasks.push(cleanImportDefinitelyTypedTests);

// TODO(rbuckton): Should the path to DefinitelyTyped be configurable via an environment variable?
const importDefinitelyTypedTests = () => exec(process.execPath, ["scripts/importDefinitelyTypedTests/importDefinitelyTypedTests.js", "./", "../DefinitelyTyped"]);
task("importDefinitelyTypedTests", series(buildImportDefinitelyTypedTests, importDefinitelyTypedTests));
const importDefinitelyTypedTests = () => exec(process.execPath, ["scripts/importDefinitelyTypedTests.mjs", "./", "../DefinitelyTyped"]);
task("importDefinitelyTypedTests", importDefinitelyTypedTests);
task("importDefinitelyTypedTests").description = "Runs the importDefinitelyTypedTests script to copy DT's tests to the TS-internal RWC tests";

const buildReleaseTsc = () => buildProject("src/tsc/tsconfig.release.json");
@ -529,7 +515,7 @@ const produceLKG = async () => {
throw new Error("Cannot replace the LKG unless all built targets are present in directory 'built/local/'. The following files are missing:\n" + missingFiles.join("\n"));
}
const sizeBefore = getDirSize("lib");
await exec(process.execPath, ["scripts/produceLKG.js"]);
await exec(process.execPath, ["scripts/produceLKG.mjs"]);
const sizeAfter = getDirSize("lib");
if (sizeAfter > (sizeBefore * 1.10)) {
throw new Error("The lib folder increased by 10% or more. This likely indicates a bug.");
@ -543,8 +529,8 @@ task("LKG").flags = {
};
task("lkg", series("LKG"));

const generateSpec = () => exec("cscript", ["//nologo", "scripts/word2md.js", path.resolve("doc/TypeScript Language Specification - ARCHIVED.docx"), path.resolve("doc/spec-ARCHIVED.md")]);
task("generate-spec", series(buildScripts, generateSpec));
const generateSpec = () => exec("cscript", ["//nologo", "scripts/word2md.mjs", path.resolve("doc/TypeScript Language Specification - ARCHIVED.docx"), path.resolve("doc/spec-ARCHIVED.md")]);
task("generate-spec", generateSpec);
task("generate-spec").description = "Generates a Markdown version of the Language Specification";

task("clean", series(parallel(cleanTasks), cleanBuilt));
@ -554,13 +540,13 @@ const configureNightly = () => exec(process.execPath, ["scripts/configurePrerele
task("configure-nightly", series(buildScripts, configureNightly));
task("configure-nightly").description = "Runs scripts/configurePrerelease.ts to prepare a build for nightly publishing";

const configureInsiders = () => exec(process.execPath, ["scripts/configurePrerelease.js", "insiders", "package.json", "src/compiler/corePublic.ts"]);
task("configure-insiders", series(buildScripts, configureInsiders));
task("configure-insiders").description = "Runs scripts/configurePrerelease.ts to prepare a build for insiders publishing";
const configureInsiders = () => exec(process.execPath, ["scripts/configurePrerelease.mjs", "insiders", "package.json", "src/compiler/corePublic.ts"]);
task("configure-insiders", configureInsiders);
task("configure-insiders").description = "Runs scripts/configurePrerelease.mjs to prepare a build for insiders publishing";

const configureExperimental = () => exec(process.execPath, ["scripts/configurePrerelease.js", "experimental", "package.json", "src/compiler/corePublic.ts"]);
task("configure-experimental", series(buildScripts, configureExperimental));
task("configure-experimental").description = "Runs scripts/configurePrerelease.ts to prepare a build for experimental publishing";
const configureExperimental = () => exec(process.execPath, ["scripts/configurePrerelease.mjs", "experimental", "package.json", "src/compiler/corePublic.ts"]);
task("configure-experimental", configureExperimental);
task("configure-experimental").description = "Runs scripts/configurePrerelease.mjs to prepare a build for experimental publishing";

const publishNightly = () => exec("npm", ["publish", "--tag", "next"]);
task("publish-nightly", series(task("clean"), task("LKG"), task("clean"), task("runtests-parallel"), publishNightly));
@ -15,6 +15,7 @@
|
|||
"devDependencies": {
|
||||
"@octokit/rest": "latest",
|
||||
"@types/chai": "latest",
|
||||
"@types/fancy-log": "^2.0.0",
|
||||
"@types/fs-extra": "^9.0.13",
|
||||
"@types/glob": "latest",
|
||||
"@types/gulp": "^4.0.9",
|
||||
|
@ -29,8 +30,8 @@
|
|||
"@types/mocha": "latest",
|
||||
"@types/ms": "latest",
|
||||
"@types/node": "latest",
|
||||
"@types/node-fetch": "^2.6.2",
|
||||
"@types/source-map-support": "latest",
|
||||
"@types/which": "^2.0.1",
|
||||
"@types/xml2js": "^0.4.11",
|
||||
"@typescript-eslint/eslint-plugin": "^5.33.1",
|
||||
"@typescript-eslint/parser": "^5.33.1",
|
||||
|
@ -61,7 +62,7 @@
|
|||
"mocha": "latest",
|
||||
"mocha-fivemat-progress-reporter": "latest",
|
||||
"ms": "^2.1.3",
|
||||
"node-fetch": "^2.6.7",
|
||||
"node-fetch": "^3.2.10",
|
||||
"source-map-support": "latest",
|
||||
"typescript": "^4.8.4",
|
||||
"vinyl": "latest",
|
||||
|
@ -385,6 +386,26 @@
|
|||
"node": ">= 14"
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/request/node_modules/node-fetch": {
|
||||
"version": "2.6.7",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
|
||||
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "4.x || >=6.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"encoding": "^0.1.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"encoding": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/@octokit/rest": {
|
||||
"version": "19.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.4.tgz",
|
||||
|
@ -421,6 +442,12 @@
|
|||
"integrity": "sha512-Q5Vn3yjTDyCMV50TB6VRIbQNxSE4OmZR86VSbGaNpfUolm0iePBB4KdEEHmxoY5sT2+2DIvXW0rvMDP2nHZ4Mg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/fancy-log": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/fancy-log/-/fancy-log-2.0.0.tgz",
|
||||
"integrity": "sha512-g39Vp8ZJ3D0gXhhkhDidVvdy4QajkF7/PV6HGn23FMaMqE/tLC1JNHUeQ7SshKLsBjucakZsXBLkWULbGLdL5g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/fs-extra": {
|
||||
"version": "9.0.13",
|
||||
"resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz",
|
||||
|
@ -565,16 +592,6 @@
|
|||
"integrity": "sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/node-fetch": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.2.tgz",
|
||||
"integrity": "sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"form-data": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/source-map-support": {
|
||||
"version": "0.5.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/source-map-support/-/source-map-support-0.5.6.tgz",
|
||||
|
@ -622,6 +639,12 @@
|
|||
"@types/vinyl": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/which": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/which/-/which-2.0.1.tgz",
|
||||
"integrity": "sha512-Jjakcv8Roqtio6w1gr0D7y6twbhx6gGgFGF5BLwajPpnOIOxFkakFhCq+LmyyeAz7BX6ULrjBOxdKaCDy+4+dQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/xml2js": {
|
||||
"version": "0.4.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
|
||||
|
@ -1234,12 +1257,6 @@
|
|||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/asynckit": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/at-least-node": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
|
||||
|
@ -1753,18 +1770,6 @@
|
|||
"color-support": "bin.js"
|
||||
}
|
||||
},
|
||||
"node_modules/combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"delayed-stream": "~1.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/comment-parser": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.3.1.tgz",
|
||||
|
@ -1879,6 +1884,15 @@
|
|||
"type": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/data-uri-to-buffer": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz",
|
||||
"integrity": "sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
}
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
|
@ -2030,15 +2044,6 @@
|
|||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/delayed-stream": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/deprecation": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
|
||||
|
@ -2964,6 +2969,29 @@
|
|||
"reusify": "^1.0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/fetch-blob": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
|
||||
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/jimmywarting"
|
||||
},
|
||||
{
|
||||
"type": "paypal",
|
||||
"url": "https://paypal.me/jimmywarting"
|
||||
}
|
||||
],
|
||||
"dependencies": {
|
||||
"node-domexception": "^1.0.0",
|
||||
"web-streams-polyfill": "^3.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^12.20 || >= 14.13"
|
||||
}
|
||||
},
|
||||
"node_modules/file-entry-cache": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
|
||||
|
@ -3292,18 +3320,16 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/form-data": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
|
||||
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
|
||||
"node_modules/formdata-polyfill": {
|
||||
"version": "4.0.10",
|
||||
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
|
||||
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"mime-types": "^2.1.12"
|
||||
"fetch-blob": "^3.1.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
"node": ">=12.20.0"
|
||||
}
|
||||
},
|
||||
"node_modules/fragment-cache": {
|
||||
|
@ -5499,27 +5525,6 @@
|
|||
"node": ">=8.6"
|
||||
}
|
||||
},
|
||||
"node_modules/mime-db": {
|
||||
"version": "1.52.0",
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/mime-types": {
|
||||
"version": "2.1.35",
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"mime-db": "1.52.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/minimatch": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
|
@ -5882,24 +5887,41 @@
|
|||
"integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/node-domexception": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
|
||||
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
|
||||
"dev": true,
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/jimmywarting"
|
||||
},
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://paypal.me/jimmywarting"
|
||||
}
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=10.5.0"
|
||||
}
|
||||
},
|
||||
"node_modules/node-fetch": {
|
||||
"version": "2.6.7",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
|
||||
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
|
||||
"version": "3.2.10",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.2.10.tgz",
|
||||
"integrity": "sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
"data-uri-to-buffer": "^4.0.0",
|
||||
"fetch-blob": "^3.1.4",
|
||||
"formdata-polyfill": "^4.0.10"
|
||||
},
|
||||
"engines": {
|
||||
"node": "4.x || >=6.0.0"
|
||||
"node": "^12.20.0 || ^14.13.1 || >=16.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"encoding": "^0.1.0"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"encoding": {
|
||||
"optional": true
|
||||
}
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/node-fetch"
|
||||
}
|
||||
},
|
||||
"node_modules/normalize-package-data": {
|
||||
|
@ -8399,6 +8421,15 @@
|
|||
"node": ">= 0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/web-streams-polyfill": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",
|
||||
"integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/webidl-conversions": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||
|
@ -8868,6 +8899,17 @@
|
|||
"is-plain-object": "^5.0.0",
|
||||
"node-fetch": "^2.6.7",
|
||||
"universal-user-agent": "^6.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"node-fetch": {
|
||||
"version": "2.6.7",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
|
||||
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@octokit/request-error": {
|
||||
|
@ -8914,6 +8956,12 @@
|
|||
"integrity": "sha512-Q5Vn3yjTDyCMV50TB6VRIbQNxSE4OmZR86VSbGaNpfUolm0iePBB4KdEEHmxoY5sT2+2DIvXW0rvMDP2nHZ4Mg==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/fancy-log": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/fancy-log/-/fancy-log-2.0.0.tgz",
|
||||
"integrity": "sha512-g39Vp8ZJ3D0gXhhkhDidVvdy4QajkF7/PV6HGn23FMaMqE/tLC1JNHUeQ7SshKLsBjucakZsXBLkWULbGLdL5g==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/fs-extra": {
|
||||
"version": "9.0.13",
|
||||
"resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-9.0.13.tgz",
|
||||
|
@ -9058,16 +9106,6 @@
|
|||
"integrity": "sha512-0os9vz6BpGwxGe9LOhgP/ncvYN5Tx1fNcd2TM3rD/aCGBkysb+ZWpXEocG24h6ZzOi13+VB8HndAQFezsSOw1w==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/node-fetch": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.2.tgz",
|
||||
"integrity": "sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@types/node": "*",
|
||||
"form-data": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"@types/source-map-support": {
|
||||
"version": "0.5.6",
|
||||
"resolved": "https://registry.npmjs.org/@types/source-map-support/-/source-map-support-0.5.6.tgz",
|
||||
|
@ -9115,6 +9153,12 @@
|
|||
"@types/vinyl": "*"
|
||||
}
|
||||
},
|
||||
"@types/which": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/which/-/which-2.0.1.tgz",
|
||||
"integrity": "sha512-Jjakcv8Roqtio6w1gr0D7y6twbhx6gGgFGF5BLwajPpnOIOxFkakFhCq+LmyyeAz7BX6ULrjBOxdKaCDy+4+dQ==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/xml2js": {
|
||||
"version": "0.4.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
|
||||
|
@ -9530,12 +9574,6 @@
|
|||
"async-done": "^1.2.2"
|
||||
}
|
||||
},
|
||||
"asynckit": {
|
||||
"version": "0.4.0",
|
||||
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
|
||||
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
|
||||
"dev": true
|
||||
},
|
||||
"at-least-node": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/at-least-node/-/at-least-node-1.0.0.tgz",
|
||||
|
@ -9943,15 +9981,6 @@
|
|||
"integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
|
||||
"dev": true
|
||||
},
|
||||
"combined-stream": {
|
||||
"version": "1.0.8",
|
||||
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
|
||||
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"delayed-stream": "~1.0.0"
|
||||
}
|
||||
},
|
||||
"comment-parser": {
|
||||
"version": "1.3.1",
|
||||
"resolved": "https://registry.npmjs.org/comment-parser/-/comment-parser-1.3.1.tgz",
|
||||
|
@ -10054,6 +10083,12 @@
|
|||
"type": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"data-uri-to-buffer": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.0.tgz",
|
||||
"integrity": "sha512-Vr3mLBA8qWmcuschSLAOogKgQ/Jwxulv3RNE4FXnYWRGujzrRWQI4m12fQqRkwX06C0KanhLr4hK+GydchZsaA==",
|
||||
"dev": true
|
||||
},
|
||||
"debug": {
|
||||
"version": "4.3.4",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz",
|
||||
|
@ -10171,12 +10206,6 @@
|
|||
"slash": "^3.0.0"
|
||||
}
|
||||
},
|
||||
"delayed-stream": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
|
||||
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
|
||||
"dev": true
|
||||
},
|
||||
"deprecation": {
|
||||
"version": "2.3.1",
|
||||
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
|
||||
|
@ -10945,6 +10974,16 @@
|
|||
"reusify": "^1.0.4"
|
||||
}
|
||||
},
|
||||
"fetch-blob": {
|
||||
"version": "3.2.0",
|
||||
"resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz",
|
||||
"integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"node-domexception": "^1.0.0",
|
||||
"web-streams-polyfill": "^3.0.3"
|
||||
}
|
||||
},
|
||||
"file-entry-cache": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
|
||||
|
@ -11208,15 +11247,13 @@
|
|||
"for-in": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"form-data": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz",
|
||||
"integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==",
|
||||
"formdata-polyfill": {
|
||||
"version": "4.0.10",
|
||||
"resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
|
||||
"integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"asynckit": "^0.4.0",
|
||||
"combined-stream": "^1.0.8",
|
||||
"mime-types": "^2.1.12"
|
||||
"fetch-blob": "^3.1.2"
|
||||
}
|
||||
},
|
||||
"fragment-cache": {
|
||||
|
@ -12929,21 +12966,6 @@
|
|||
"picomatch": "^2.3.1"
|
||||
}
|
||||
},
|
||||
"mime-db": {
|
||||
"version": "1.52.0",
|
||||
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
|
||||
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
|
||||
"dev": true
|
||||
},
|
||||
"mime-types": {
|
||||
"version": "2.1.35",
|
||||
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
|
||||
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"mime-db": "1.52.0"
|
||||
}
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
|
||||
|
@ -13225,13 +13247,21 @@
|
|||
"integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node-domexception": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz",
|
||||
"integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node-fetch": {
|
||||
"version": "2.6.7",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
|
||||
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
|
||||
"version": "3.2.10",
|
||||
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.2.10.tgz",
|
||||
"integrity": "sha512-MhuzNwdURnZ1Cp4XTazr69K0BTizsBroX7Zx3UgDSVcZYKF/6p0CBe4EUb/hLqmzVhl0UpYfgRljQ4yxE+iCxA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"whatwg-url": "^5.0.0"
|
||||
"data-uri-to-buffer": "^4.0.0",
|
||||
"fetch-blob": "^3.1.4",
|
||||
"formdata-polyfill": "^4.0.10"
|
||||
}
|
||||
},
|
||||
"normalize-package-data": {
|
||||
|
@ -15196,6 +15226,12 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"web-streams-polyfill": {
|
||||
"version": "3.2.1",
|
||||
"resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz",
|
||||
"integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==",
|
||||
"dev": true
|
||||
},
|
||||
"webidl-conversions": {
|
||||
"version": "3.0.1",
|
||||
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
|
||||
|
|
|
@ -41,6 +41,7 @@
"devDependencies": {
"@octokit/rest": "latest",
"@types/chai": "latest",
"@types/fancy-log": "^2.0.0",
"@types/fs-extra": "^9.0.13",
"@types/glob": "latest",
"@types/gulp": "^4.0.9",
@ -55,8 +56,8 @@
"@types/mocha": "latest",
"@types/ms": "latest",
"@types/node": "latest",
"@types/node-fetch": "^2.6.2",
"@types/source-map-support": "latest",
"@types/which": "^2.0.1",
"@types/xml2js": "^0.4.11",
"@typescript-eslint/eslint-plugin": "^5.33.1",
"@typescript-eslint/parser": "^5.33.1",
@ -87,7 +88,7 @@
"mocha": "latest",
"mocha-fivemat-progress-reporter": "latest",
"ms": "^2.1.3",
"node-fetch": "^2.6.7",
"node-fetch": "^3.2.10",
"source-map-support": "latest",
"typescript": "^4.8.4",
"vinyl": "latest",
@ -107,7 +108,7 @@
"clean": "gulp clean",
"gulp": "gulp",
"lint": "gulp lint",
"setup-hooks": "node scripts/link-hooks.js"
"setup-hooks": "node scripts/link-hooks.mjs"
},
"browser": {
"fs": false,
@ -1,34 +1,36 @@
// @ts-check
const chalk = require("chalk");
const { join } = require("path");
const { readFileSync } = require("fs");
import chalk from "chalk";
import { join } from "path";
import { readFileSync } from "fs";

let playwright;
try {
// eslint-disable-next-line import/no-extraneous-dependencies
require("playwright");
// @ts-ignore-error
playwright = await import("playwright");
}
catch (error) {
throw new Error("Playwright is expected to be installed manually before running this script");
}

// eslint-disable-next-line import/no-extraneous-dependencies
const playwright = require("playwright");

// Turning this on will leave the Chromium browser open, giving you the
// chance to open up the web inspector.
const debugging = false;

(async () => {
for (const browserType of ["chromium", "firefox"]) {
/** @type {["chromium", "firefox"]} */
const browsers = ["chromium", "firefox"];

for (const browserType of browsers) {
const browser = await playwright[browserType].launch({ headless: !debugging });
const context = await browser.newContext();
const page = await context.newPage();

/** @type {(err: Error) => void} */
const errorCaught = err => {
console.error(chalk.red("There was an error running built/typescript.js in " + browserType));
console.log(err.toString());
process.exitCode = 1;
};

// @ts-ignore-error
page.on("error", errorCaught);
page.on("pageerror", errorCaught);

@ -45,13 +47,4 @@ const debugging = false;
console.log("Not closing the browser, you'll need to exit the process in your terminal manually");
}
console.log(`${browserType} :+1:`);
}
})();

process.on("unhandledRejection", (/** @type {any}*/ err) => {
if (err) {
console.error(err.stack || err.message);
}
process.exit(1);
});

}
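Beyond the import/export rewrite, the converted browserIntegrationTest.mjs leans on two module-only features: top-level await for the dynamic import, and a try/catch around it so playwright stays an optional, manually installed dependency. A minimal sketch of that optional-dependency pattern, with a hypothetical package name:

let optionalDep;
try {
    // require() is not available in an .mjs file; dynamic import() replaces it,
    // and top-level await is legal because this file is an ES module.
    optionalDep = await import("some-optional-package");
}
catch {
    throw new Error("some-optional-package is expected to be installed manually before running this script");
}
// use optionalDep's named exports (or optionalDep.default) from here on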
@ -1,21 +0,0 @@
const { join, resolve, dirname } = require("path");
const { existsSync } = require("fs");

// search directories upward to avoid hard-wired paths based on the
// build tree (same as src/harness/findUpDir.ts)

function findUpFile(name) {
let dir = __dirname;
while (true) {
const fullPath = join(dir, name);
if (existsSync(fullPath)) return fullPath;
const up = resolve(dir, "..");
if (up === dir) return name; // it'll fail anyway
dir = up;
}
}
exports.findUpFile = findUpFile;

const findUpRoot = () =>
findUpRoot.cached || (findUpRoot.cached = dirname(findUpFile("Gulpfile.js")));
exports.findUpRoot = findUpRoot;
@ -0,0 +1,29 @@
import { join, resolve, dirname } from "path";
import { existsSync } from "fs";
import url from "url";

const __filename = url.fileURLToPath(new URL(import.meta.url));
const __dirname = dirname(__filename);

// search directories upward to avoid hard-wired paths based on the
// build tree (same as src/harness/findUpDir.ts)

/**
* @param {string} name
* @returns {string}
*/
export function findUpFile(name) {
let dir = __dirname;
while (true) {
const fullPath = join(dir, name);
if (existsSync(fullPath)) return fullPath;
const up = resolve(dir, "..");
if (up === dir) return name; // it'll fail anyway
dir = up;
}
}

/** @type {string | undefined} */
let findUpRootCache;

export const findUpRoot = () => findUpRootCache || (findUpRootCache = dirname(findUpFile("Gulpfile.mjs")));
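The new findUpDir.mjs preserves the search-upward behavior of the deleted CommonJS version while exporting it as ES module bindings. A usage sketch (this mirrors how scripts/build/tests.mjs later in this diff resolves the mocha binary from the repository root):

import path from "path";
import { findUpFile, findUpRoot } from "./findUpDir.mjs";

const gulpfile = findUpFile("Gulpfile.mjs"); // nearest Gulpfile.mjs above this script's directory
const mochaJs = path.resolve(findUpRoot(), "node_modules", "mocha", "bin", "_mocha");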
@ -1,11 +1,9 @@
// @ts-check
const minimist = require("minimist");
const os = require("os");
import minimist from "minimist";
import os from "os";

const ci = ["1", "true"].includes(process.env.CI);
const ci = ["1", "true"].includes(process.env.CI ?? "");

/** @type {CommandLineOptions} */
module.exports = minimist(process.argv.slice(2), {
const parsed = minimist(process.argv.slice(2), {
boolean: ["dirty", "light", "colors", "lkg", "soft", "fix", "failed", "keepFailed", "force", "built", "ci"],
string: ["browser", "tests", "break", "host", "reporter", "stackTraceLimit", "timeout", "shards", "shardId"],
alias: {
@ -44,12 +42,19 @@ module.exports = minimist(process.argv.slice(2), {
}
});

if (module.exports.built) {
module.exports.lkg = false;
/** @type {CommandLineOptions} */
const options = /** @type {any} */ (parsed);

if (options.built) {
options.lkg = false;
}

export default options;



/**
* @typedef TypedOptions
* @typedef CommandLineOptions
* @property {boolean} dirty
* @property {boolean} light
* @property {boolean} colors
@ -59,6 +64,7 @@ if (module.exports.built) {
* @property {boolean} fix
* @property {string} browser
* @property {string} tests
* @property {string | boolean} break
* @property {string | boolean} inspect
* @property {string} runners
* @property {string|number} workers
@ -69,7 +75,7 @@ if (module.exports.built) {
* @property {boolean} failed
* @property {boolean} keepFailed
* @property {boolean} ci
*
* @typedef {import("minimist").ParsedArgs & TypedOptions} CommandLineOptions
* @property {string} shards
* @property {string} shardId
*/
void 0;
@ -1,18 +1,17 @@
// @ts-check
const stream = require("stream");
const ts = require("../../lib/typescript");
const fs = require("fs");
const { base64VLQFormatEncode } = require("./sourcemaps");
import stream from "stream";
import ts from "../../lib/typescript.js";
import fs from "fs";
import { base64VLQFormatEncode } from "./sourcemaps.mjs";

/**
* @param {string | ((file: import("vinyl")) => string)} data
*/
function prepend(data) {
export function prepend(data) {
return new stream.Transform({
objectMode: true,
/**
* @param {string | Buffer | import("vinyl")} input
* @param {(error: Error, data?: any) => void} cb
* @param {(error: Error | null, data?: any) => void} cb
*/
transform(input, _, cb) {
if (typeof input === "string" || Buffer.isBuffer(input)) return cb(new Error("Only Vinyl files are supported."));
@ -22,7 +21,7 @@ function prepend(data) {
const prependContent = typeof data === "function" ? data(input) : data;
output.contents = Buffer.concat([Buffer.from(prependContent, "utf8"), input.contents]);
if (input.sourceMap) {
if (typeof input.sourceMap === "string") input.sourceMap = /**@type {import("./sourcemaps").RawSourceMap}*/(JSON.parse(input.sourceMap));
if (typeof input.sourceMap === "string") input.sourceMap = /**@type {import("./sourcemaps.mjs").RawSourceMap}*/(JSON.parse(input.sourceMap));
const lineStarts = /**@type {*}*/(ts).computeLineStarts(prependContent);
let prependMappings = "";
for (let i = 1; i < lineStarts.length; i++) {
@ -46,19 +45,17 @@ function prepend(data) {
return cb(null, output);
}
catch (e) {
return cb(e);
return cb(/** @type {Error} */(e));
}
}
});
}
exports.prepend = prepend;

/**
* @param {string | ((file: import("vinyl")) => string)} file
*/
function prependFile(file) {
export function prependFile(file) {
const data = typeof file === "string" ? fs.readFileSync(file, "utf8") :
vinyl => fs.readFileSync(file(vinyl), "utf8");
(/** @type {import("vinyl")} */ vinyl) => fs.readFileSync(file(vinyl), "utf8");
return prepend(data);
}
exports.prependFile = prependFile;
@ -1,7 +1,7 @@
// @ts-check
const { exec, Debouncer } = require("./utils");
const { resolve } = require("path");
const { findUpRoot } = require("./findUpDir");
import { exec, Debouncer } from "./utils.mjs";
import { resolve } from "path";
import { findUpRoot } from "./findUpDir.mjs";
import assert from "assert";

class ProjectQueue {
/**
@ -15,12 +15,13 @@ class ProjectQueue {

/**
* @param {string} project
* @param {object} options
* @param {{ lkg?: boolean; force?: boolean; }} options
*/
enqueue(project, { lkg = true, force = false } = {}) {
let entry = this._debouncers.find(entry => entry.lkg === lkg && entry.force === force);
if (!entry) {
const debouncer = new Debouncer(100, async () => {
assert(entry);
const projects = entry.projects;
if (projects) {
entry.projects = undefined;
@ -49,14 +50,14 @@ const projectBuilder = new ProjectQueue((projects, lkg, force) => execTsc(lkg, .
* @param {boolean} [options.lkg=true]
* @param {boolean} [options.force=false]
*/
exports.buildProject = (project, { lkg, force } = {}) => projectBuilder.enqueue(project, { lkg, force });
export const buildProject = (project, { lkg, force } = {}) => projectBuilder.enqueue(project, { lkg, force });

const projectCleaner = new ProjectQueue((projects, lkg) => execTsc(lkg, "--clean", ...projects));

/**
* @param {string} project
*/
exports.cleanProject = (project) => projectCleaner.enqueue(project);
export const cleanProject = (project) => projectCleaner.enqueue(project);

const projectWatcher = new ProjectQueue((projects) => execTsc(/*lkg*/ true, "--watch", ...projects));

@ -65,4 +66,4 @@ const projectWatcher = new ProjectQueue((projects) => execTsc(/*lkg*/ true, "--w
* @param {object} options
* @param {boolean} [options.lkg=true]
*/
exports.watchProject = (project, { lkg } = {}) => projectWatcher.enqueue(project, { lkg });
export const watchProject = (project, { lkg } = {}) => projectWatcher.enqueue(project, { lkg });
@ -1,4 +1,3 @@
// @ts-check
/**
* @param {string} message
* @returns {never}
@ -23,7 +22,7 @@ function base64FormatEncode(value) {
/**
* @param {number} value
*/
function base64VLQFormatEncode(value) {
export function base64VLQFormatEncode(value) {
if (value < 0) {
value = ((-value) << 1) + 1;
}
@ -45,6 +44,5 @@ function base64VLQFormatEncode(value) {

return result;
}
exports.base64VLQFormatEncode = base64VLQFormatEncode;

/** @typedef {object} RawSourceMap */
@ -1,28 +1,27 @@
|
|||
// @ts-check
|
||||
const del = require("del");
|
||||
const fs = require("fs");
|
||||
const os = require("os");
|
||||
const path = require("path");
|
||||
const mkdirP = require("mkdirp");
|
||||
const log = require("fancy-log");
|
||||
const cmdLineOptions = require("./options");
|
||||
const { exec } = require("./utils");
|
||||
const { findUpFile } = require("./findUpDir");
|
||||
import del from "del";
|
||||
import fs from "fs";
|
||||
import os from "os";
|
||||
import path from "path";
|
||||
import mkdirP from "mkdirp";
|
||||
import log from "fancy-log";
|
||||
import cmdLineOptions from "./options.mjs";
|
||||
import { exec } from "./utils.mjs";
|
||||
import { findUpFile, findUpRoot } from "./findUpDir.mjs";
|
||||
|
||||
const mochaJs = require.resolve("mocha/bin/_mocha");
|
||||
exports.localBaseline = "tests/baselines/local/";
|
||||
exports.refBaseline = "tests/baselines/reference/";
|
||||
exports.localRwcBaseline = "internal/baselines/rwc/local";
|
||||
exports.refRwcBaseline = "internal/baselines/rwc/reference";
|
||||
exports.localTest262Baseline = "internal/baselines/test262/local";
|
||||
const mochaJs = path.resolve(findUpRoot(), "node_modules", "mocha", "bin", "_mocha");
|
||||
export const localBaseline = "tests/baselines/local/";
|
||||
export const refBaseline = "tests/baselines/reference/";
|
||||
export const localRwcBaseline = "internal/baselines/rwc/local";
|
||||
export const refRwcBaseline = "internal/baselines/rwc/reference";
|
||||
export const localTest262Baseline = "internal/baselines/test262/local";
|
||||
|
||||
/**
|
||||
* @param {string} runJs
|
||||
* @param {string} defaultReporter
|
||||
* @param {boolean} runInParallel
|
||||
* @param {boolean} watchMode
|
||||
* @param {boolean} _watchMode
|
||||
*/
|
||||
async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode) {
|
||||
export async function runConsoleTests(runJs, defaultReporter, runInParallel, _watchMode) {
|
||||
let testTimeout = cmdLineOptions.timeout;
|
||||
const tests = cmdLineOptions.tests;
|
||||
const inspect = cmdLineOptions.break || cmdLineOptions.inspect;
|
||||
|
@ -74,7 +73,7 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode)
|
|||
// default timeout is 2sec which really should be enough, but maybe we just need a small amount longer
|
||||
if (!runInParallel) {
|
||||
args.push(mochaJs);
|
||||
args.push("-R", findUpFile("scripts/failed-tests.js"));
|
||||
args.push("-R", findUpFile("scripts/failed-tests.cjs"));
|
||||
args.push("-O", '"reporter=' + reporter + (keepFailed ? ",keepFailed=true" : "") + '"');
|
||||
if (tests) {
|
||||
args.push("-g", `"${tests}"`);
|
||||
|
@ -123,7 +122,7 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode)
|
|||
errorStatus = exitCode;
|
||||
error = new Error(`Process exited with status code ${errorStatus}.`);
|
||||
}
|
||||
else if (cmdLineOptions.ci) {
|
||||
else if (cmdLineOptions.ci && runJs.startsWith("built")) {
|
||||
// finally, do a sanity check and build the compiler with the built version of itself
|
||||
log.info("Starting sanity check build...");
|
||||
// Cleanup everything except lint rules (we'll need those later and would rather not waste time rebuilding them)
|
||||
|
@ -137,7 +136,7 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode)
|
|||
}
|
||||
catch (e) {
|
||||
errorStatus = undefined;
|
||||
error = e;
|
||||
error = /** @type {Error} */ (e);
|
||||
}
|
||||
finally {
|
||||
restoreSavedNodeEnv();
|
||||
|
@ -151,14 +150,12 @@ async function runConsoleTests(runJs, defaultReporter, runInParallel, watchMode)
|
|||
throw error;
|
||||
}
|
||||
}
|
||||
exports.runConsoleTests = runConsoleTests;
|
||||
|
||||
async function cleanTestDirs() {
|
||||
await del([exports.localBaseline, exports.localRwcBaseline]);
|
||||
mkdirP.sync(exports.localRwcBaseline);
|
||||
mkdirP.sync(exports.localBaseline);
|
||||
export async function cleanTestDirs() {
|
||||
await del([localBaseline, localRwcBaseline]);
|
||||
mkdirP.sync(localRwcBaseline);
|
||||
mkdirP.sync(localBaseline);
|
||||
}
|
||||
exports.cleanTestDirs = cleanTestDirs;
|
||||
|
||||
/**
|
||||
* used to pass data from gulp command line directly to run.js
|
||||
|
@ -173,7 +170,7 @@ exports.cleanTestDirs = cleanTestDirs;
|
|||
* @param {number | undefined} [shards]
|
||||
* @param {number | undefined} [shardId]
|
||||
*/
|
||||
function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed, shards, shardId) {
|
||||
export function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCount, stackTraceLimit, timeout, keepFailed, shards, shardId) {
|
||||
const testConfigContents = JSON.stringify({
|
||||
test: tests ? [tests] : undefined,
|
||||
runners: runners ? runners.split(",") : undefined,
|
||||
|
@ -190,9 +187,8 @@ function writeTestConfigFile(tests, runners, light, taskConfigsFolder, workerCou
|
|||
log.info("Running tests with config: " + testConfigContents);
|
||||
fs.writeFileSync("test.config", testConfigContents);
|
||||
}
|
||||
exports.writeTestConfigFile = writeTestConfigFile;
|
||||
|
||||
/** @type {string} */
|
||||
/** @type {string | undefined} */
|
||||
let savedNodeEnv;
|
||||
function setNodeEnvToDevelopment() {
|
||||
savedNodeEnv = process.env.NODE_ENV;
|
||||
|
@ -204,9 +200,12 @@ function restoreSavedNodeEnv() {
|
|||
}
|
||||
|
||||
function deleteTemporaryProjectOutput() {
|
||||
return del(path.join(exports.localBaseline, "projectOutput/"));
|
||||
return del(path.join(localBaseline, "projectOutput/"));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
function regExpEscape(text) {
|
||||
return text.replace(/[.*+?^${}()|\[\]\\]/g, "\\$&");
|
||||
}
|
|
@ -1,20 +1,18 @@
// @ts-check

/* eslint-disable no-restricted-globals */
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
/// <reference path="../types/ambient.d.ts" />

const fs = require("fs");
const path = require("path");
const log = require("fancy-log");
const mkdirp = require("mkdirp");
const del = require("del");
const File = require("vinyl");
const ts = require("../../lib/typescript");
const chalk = require("chalk");
const which = require("which");
const { spawn } = require("child_process");
const { Readable, Duplex } = require("stream");
import fs from "fs";
import path from "path";
import log from "fancy-log";
import del from "del";
import File from "vinyl";
import ts from "../../lib/typescript.js";
import chalk from "chalk";
import which from "which";
import { spawn } from "child_process";
import { Duplex } from "stream";
import assert from "assert";

/**
* Executes the provided command once with the supplied arguments.
@ -27,8 +25,8 @@ const { Readable, Duplex } = require("stream");
* @property {boolean} [hidePrompt]
* @property {boolean} [waitForExit=true]
*/
async function exec(cmd, args, options = {}) {
return /**@type {Promise<{exitCode: number}>}*/(new Promise((resolve, reject) => {
export async function exec(cmd, args, options = {}) {
return /**@type {Promise<{exitCode?: number}>}*/(new Promise((resolve, reject) => {
const { ignoreExitCode, waitForExit = true } = options;

if (!options.hidePrompt) log(`> ${chalk.green(cmd)} ${args.join(" ")}`);
@ -36,7 +34,7 @@ async function exec(cmd, args, options = {}) {
if (waitForExit) {
proc.on("exit", exitCode => {
if (exitCode === 0 || ignoreExitCode) {
resolve({ exitCode });
resolve({ exitCode: exitCode ?? undefined });
}
else {
reject(new Error(`Process exited with code: ${exitCode}`));
@ -53,7 +51,6 @@ async function exec(cmd, args, options = {}) {
}
}));
}
exports.exec = exec;

/**
* @param {ts.Diagnostic[]} diagnostics
@ -64,7 +61,6 @@ function formatDiagnostics(diagnostics, options) {
? ts.formatDiagnosticsWithColorAndContext(diagnostics, getFormatDiagnosticsHost(options && options.cwd))
: ts.formatDiagnostics(diagnostics, getFormatDiagnosticsHost(options && options.cwd));
}
exports.formatDiagnostics = formatDiagnostics;

/**
* @param {ts.Diagnostic[]} diagnostics
@ -73,7 +69,6 @@ exports.formatDiagnostics = formatDiagnostics;
function reportDiagnostics(diagnostics, options) {
log(formatDiagnostics(diagnostics, { cwd: options && options.cwd, pretty: process.stdout.isTTY }));
}
exports.reportDiagnostics = reportDiagnostics;

/**
* @param {string | undefined} cwd
@ -82,17 +77,16 @@ exports.reportDiagnostics = reportDiagnostics;
function getFormatDiagnosticsHost(cwd) {
return {
getCanonicalFileName: fileName => fileName,
getCurrentDirectory: () => cwd,
getCurrentDirectory: () => cwd ?? process.cwd(),
getNewLine: () => ts.sys.newLine,
};
}
exports.getFormatDiagnosticsHost = getFormatDiagnosticsHost;

/**
* Reads JSON data with optional comments using the LKG TypeScript compiler
* @param {string} jsonPath
*/
function readJson(jsonPath) {
export function readJson(jsonPath) {
const jsonText = fs.readFileSync(jsonPath, "utf8");
const result = ts.parseConfigFileTextToJson(jsonPath, jsonText);
if (result.error) {
@ -101,38 +95,13 @@ function readJson(jsonPath) {
}
return result.config;
}
exports.readJson = readJson;

/**
* @param {File} file
*/
function streamFromFile(file) {
return file.isBuffer() ? streamFromBuffer(file.contents) :
file.isStream() ? file.contents :
fs.createReadStream(file.path, { autoClose: true });
}
exports.streamFromFile = streamFromFile;

/**
* @param {Buffer} buffer
*/
function streamFromBuffer(buffer) {
return new Readable({
read() {
this.push(buffer);
// eslint-disable-next-line no-null/no-null
this.push(null);
}
});
}
exports.streamFromBuffer = streamFromBuffer;

/**
* @param {string | string[]} source
* @param {string | string[]} dest
* @returns {boolean}
*/
function needsUpdate(source, dest) {
export function needsUpdate(source, dest) {
if (typeof source === "string" && typeof dest === "string") {
if (fs.existsSync(dest)) {
const {mtime: outTime} = fs.statSync(dest);
@ -194,9 +163,8 @@ function needsUpdate(source, dest) {
}
return true;
}
exports.needsUpdate = needsUpdate;

function getDiffTool() {
export function getDiffTool() {
const program = process.env.DIFF;
if (!program) {
log.warn("Add the 'DIFF' environment variable to the path of the program you want to use.");
@ -204,7 +172,6 @@ function getDiffTool() {
}
return program;
}
exports.getDiffTool = getDiffTool;

/**
* Find the size of a directory recursively.
@ -212,7 +179,7 @@ exports.getDiffTool = getDiffTool;
* @param {string} root
* @returns {number} bytes
*/
function getDirSize(root) {
export function getDirSize(root) {
const stats = fs.lstatSync(root);

if (!stats.isDirectory()) {
@ -223,97 +190,12 @@ function getDirSize(root) {
.map(file => getDirSize(path.join(root, file)))
.reduce((acc, num) => acc + num, 0);
}
exports.getDirSize = getDirSize;

/**
* Flattens a project with project references into a single project.
* @param {string} projectSpec The path to a tsconfig.json file or its containing directory.
* @param {string} flattenedProjectSpec The output path for the flattened tsconfig.json file.
* @param {FlattenOptions} [options] Options used to flatten a project hierarchy.
*
* @typedef FlattenOptions
* @property {string} [cwd] The path to use for the current working directory. Defaults to `process.cwd()`.
* @property {import("../../lib/typescript").CompilerOptions} [compilerOptions] Compiler option overrides.
* @property {boolean} [force] Forces creation of the output project.
* @property {string[]} [exclude] Files to exclude (relative to `cwd`)
*/
function flatten(projectSpec, flattenedProjectSpec, options = {}) {
const cwd = normalizeSlashes(options.cwd ? path.resolve(options.cwd) : process.cwd());
const files = [];
const resolvedOutputSpec = path.resolve(cwd, flattenedProjectSpec);
const resolvedOutputDirectory = path.dirname(resolvedOutputSpec);
const resolvedProjectSpec = resolveProjectSpec(projectSpec, cwd, /*referrer*/ undefined);
const project = readJson(resolvedProjectSpec);
const skipProjects = /**@type {Set<string>}*/(new Set());
const skipFiles = new Set(options && options.exclude && options.exclude.map(file => normalizeSlashes(path.resolve(cwd, file))));
recur(resolvedProjectSpec, project);

if (options.force || needsUpdate(files, resolvedOutputSpec)) {
const config = {
extends: normalizeSlashes(path.relative(resolvedOutputDirectory, resolvedProjectSpec)),
compilerOptions: options.compilerOptions || {},
files: files.map(file => normalizeSlashes(path.relative(resolvedOutputDirectory, file)))
};
mkdirp.sync(resolvedOutputDirectory);
fs.writeFileSync(resolvedOutputSpec, JSON.stringify(config, undefined, 2), "utf8");
}

/**
* @param {string} projectSpec
* @param {object} project
*/
function recur(projectSpec, project) {
if (skipProjects.has(projectSpec)) return;
skipProjects.add(project);
if (project.references) {
for (const ref of project.references) {
const referencedSpec = resolveProjectSpec(ref.path, cwd, projectSpec);
const referencedProject = readJson(referencedSpec);
recur(referencedSpec, referencedProject);
}
}
if (project.include) {
throw new Error("Flattened project may not have an 'include' list.");
}
if (!project.files) {
throw new Error("Flattened project must have an explicit 'files' list.");
}
const projectDirectory = path.dirname(projectSpec);
for (let file of project.files) {
file = normalizeSlashes(path.resolve(projectDirectory, file));
if (skipFiles.has(file)) continue;
skipFiles.add(file);
files.push(file);
}
}
}
exports.flatten = flatten;

/**
* @param {string} file
*/
function normalizeSlashes(file) {
return file.replace(/\\/g, "/");
}

/**
* @param {string} projectSpec
* @param {string} cwd
* @param {string | undefined} referrer
* @returns {string}
*/
function resolveProjectSpec(projectSpec, cwd, referrer) {
const projectPath = normalizeSlashes(path.resolve(cwd, referrer ? path.dirname(referrer) : "", projectSpec));
const stats = fs.statSync(projectPath);
if (stats.isFile()) return normalizeSlashes(projectPath);
return normalizeSlashes(path.resolve(cwd, projectPath, "tsconfig.json"));
}

/**
* @param {string | ((file: File) => string) | { cwd?: string }} [dest]
* @param {{ cwd?: string }} [opts]
*/
function rm(dest, opts) {
export function rm(dest, opts) {
if (dest && typeof dest === "object") {
opts = dest;
dest = undefined;
@ -328,7 +210,9 @@ function rm(dest, opts) {
const processDeleted = () => {
if (failed) return;
while (pending.length && pending[0].deleted) {
const { file, cb } = pending.shift();
const fileAndCallback = pending.shift();
assert(fileAndCallback);
const { file, cb } = fileAndCallback;
duplex.push(file);
cb();
}
@ -382,7 +266,6 @@ function rm(dest, opts) {
});
return duplex;
}
exports.rm = rm;

class Deferred {
constructor() {
@ -393,7 +276,7 @@ class Deferred {
}
}

class Debouncer {
export class Debouncer {
/**
* @param {number} timeout
* @param {() => Promise<any>} action
@ -424,8 +307,8 @@ class Debouncer {
}

const deferred = this._deferred;
assert(deferred);
this._deferred = undefined;
this._projects = undefined;
try {
deferred.resolve(this._action());
}
@ -434,4 +317,3 @@ class Debouncer {
}
}
}
exports.Debouncer = Debouncer;
@ -1,25 +1,59 @@
/// <reference types="node"/>
import ts from "../lib/typescript.js";
import path from "path";
import assert from "assert";

import * as ts from "../lib/typescript";
import * as path from "path";

function endsWith(s: string, suffix: string) {
/**
*
* @param {string} s
* @param {string} suffix
* @returns {boolean}
*/
function endsWith(s, suffix) {
return s.lastIndexOf(suffix, s.length - suffix.length) !== -1;
}

function isStringEnum(declaration: ts.EnumDeclaration) {
return declaration.members.length && declaration.members.every(m => !!m.initializer && m.initializer.kind === ts.SyntaxKind.StringLiteral);
/**
* @param {ts.EnumDeclaration} declaration
* @returns {boolean}
*/
function isStringEnum(declaration) {
return !!declaration.members.length && declaration.members.every(m => !!m.initializer && m.initializer.kind === ts.SyntaxKind.StringLiteral);
}

class DeclarationsWalker {
private visitedTypes: ts.Type[] = [];
private text = "";
private removedTypes: ts.Type[] = [];
/**
* @type {ts.Type[]}
* @private
*/
visitedTypes = [];
/**
* @type {string}
* @private
*/
text = "";
/**
* @type {ts.Type[]}
* @private
*/
removedTypes = [];

private constructor(private typeChecker: ts.TypeChecker, private protocolFile: ts.SourceFile) {
/**
* @param {ts.TypeChecker} typeChecker
* @param {ts.SourceFile} protocolFile
* @private
*/
constructor(typeChecker, protocolFile) {
this.typeChecker = typeChecker;
this.protocolFile = protocolFile;
}

static getExtraDeclarations(typeChecker: ts.TypeChecker, protocolFile: ts.SourceFile): string {
/**
*
* @param {ts.TypeChecker} typeChecker
* @param {ts.SourceFile} protocolFile
* @returns {string}
*/
static getExtraDeclarations(typeChecker, protocolFile) {
const walker = new DeclarationsWalker(typeChecker, protocolFile);
walker.visitTypeNodes(protocolFile);
let text = walker.text
@ -36,7 +70,12 @@ class DeclarationsWalker {
return text;
}

private processType(type: ts.Type): void {
/**
* @param {ts.Type} type
* @returns {void}
* @private
*/
processType(type) {
if (this.visitedTypes.indexOf(type) >= 0) {
return;
}
@ -47,7 +86,7 @@ class DeclarationsWalker {
}
if (s.name === "Array" || s.name === "ReadOnlyArray") {
// we should process type argument instead
return this.processType((type as any).typeArguments[0]);
return this.processType(/** @type {any} */(type).typeArguments[0]);
}
else {
const declarations = s.getDeclarations();
@ -57,7 +96,7 @@ class DeclarationsWalker {
if (sourceFile === this.protocolFile || /lib(\..+)?\.d.ts/.test(path.basename(sourceFile.fileName))) {
return;
}
if (decl.kind === ts.SyntaxKind.EnumDeclaration && !isStringEnum(decl as ts.EnumDeclaration)) {
if (ts.isEnumDeclaration(decl) && !isStringEnum(decl)) {
this.removedTypes.push(type);
return;
}
@ -74,7 +113,11 @@ class DeclarationsWalker {
}
}

private visitTypeNodes(node: ts.Node) {
/**
* @param {ts.Node} node
* @private
*/
visitTypeNodes(node) {
if (node.parent) {
switch (node.parent.kind) {
case ts.SyntaxKind.VariableDeclaration:
@ -84,12 +127,13 @@ class DeclarationsWalker {
case ts.SyntaxKind.PropertySignature:
case ts.SyntaxKind.Parameter:
case ts.SyntaxKind.IndexSignature:
if (((node.parent as ts.VariableDeclaration | ts.MethodDeclaration | ts.PropertyDeclaration | ts.ParameterDeclaration | ts.PropertySignature | ts.MethodSignature | ts.IndexSignatureDeclaration).type) === node) {
const parent = /** @type {ts.VariableDeclaration | ts.MethodDeclaration | ts.PropertyDeclaration | ts.ParameterDeclaration | ts.PropertySignature | ts.MethodSignature | ts.IndexSignatureDeclaration} */ (node.parent);
if (parent.type === node) {
this.processTypeOfNode(node);
}
break;
case ts.SyntaxKind.InterfaceDeclaration:
const heritageClauses = (node.parent as ts.InterfaceDeclaration).heritageClauses;
const heritageClauses = /** @type {ts.InterfaceDeclaration} */ (node.parent).heritageClauses;
if (heritageClauses) {
if (heritageClauses[0].token !== ts.SyntaxKind.ExtendsKeyword) {
throw new Error(`Unexpected kind of heritage clause: ${ts.SyntaxKind[heritageClauses[0].kind]}`);
@ -104,9 +148,13 @@ class DeclarationsWalker {
ts.forEachChild(node, n => this.visitTypeNodes(n));
}

private processTypeOfNode(node: ts.Node): void {
/**
* @param {ts.Node} node
* @private
*/
processTypeOfNode(node) {
if (node.kind === ts.SyntaxKind.UnionType) {
for (const t of (node as ts.UnionTypeNode).types) {
for (const t of /** @type {ts.UnionTypeNode} */ (node).types) {
this.processTypeOfNode(t);
}
}
@ -119,8 +167,14 @@ class DeclarationsWalker {
}
}

function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptServicesDts: string) {
const options = { target: ts.ScriptTarget.ES5, declaration: true, noResolve: false, types: [] as string[], stripInternal: true };
/**
* @param {string} outputFile
* @param {string} protocolTs
* @param {string} typeScriptServicesDts
*/
function writeProtocolFile(outputFile, protocolTs, typeScriptServicesDts) {
/** @type {ts.CompilerOptions} */
const options = { target: ts.ScriptTarget.ES5, declaration: true, noResolve: false, types: [], stripInternal: true };

/**
* 1st pass - generate a program from protocol.ts and typescriptservices.d.ts and emit core version of protocol.d.ts with all internal members stripped
@ -129,7 +183,8 @@ function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptSer
function getInitialDtsFileForProtocol() {
const program = ts.createProgram([protocolTs, typeScriptServicesDts, path.join(typeScriptServicesDts, "../lib.es5.d.ts")], options);

let protocolDts: string | undefined;
/** @type {string | undefined} */
let protocolDts;
const emitResult = program.emit(program.getSourceFile(protocolTs), (file, content) => {
if (endsWith(file, ".d.ts")) {
protocolDts = content;
@ -137,7 +192,8 @@ function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptSer
});

if (protocolDts === undefined) {
const diagHost: ts.FormatDiagnosticsHost = {
/** @type {ts.FormatDiagnosticsHost} */
const diagHost = {
getCanonicalFileName(f) { return f; },
getCurrentDirectory() { return "."; },
getNewLine() { return "\r\n"; }
@ -151,12 +207,15 @@ function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptSer
const protocolFileName = "protocol.d.ts";
/**
* Second pass - generate a program from protocol.d.ts and typescriptservices.d.ts, then augment core protocol.d.ts with extra types from typescriptservices.d.ts
* @param {string} protocolDts
* @param {boolean} includeTypeScriptServices
*/
function getProgramWithProtocolText(protocolDts: string, includeTypeScriptServices: boolean) {
function getProgramWithProtocolText(protocolDts, includeTypeScriptServices) {
const host = ts.createCompilerHost(options);
const originalGetSourceFile = host.getSourceFile;
host.getSourceFile = (fileName) => {
if (fileName === protocolFileName) {
assert(options.target !== undefined);
return ts.createSourceFile(fileName, protocolDts, options.target);
}
return originalGetSourceFile.apply(host, [fileName, ts.ScriptTarget.Latest]);
@ -168,7 +227,8 @@ function writeProtocolFile(outputFile: string, protocolTs: string, typeScriptSer
let protocolDts = getInitialDtsFileForProtocol();
const program = getProgramWithProtocolText(protocolDts, /*includeTypeScriptServices*/ true);

const protocolFile = program.getSourceFile("protocol.d.ts")!;
const protocolFile = program.getSourceFile("protocol.d.ts");
assert(protocolFile);
const extraDeclarations = DeclarationsWalker.getExtraDeclarations(program.getTypeChecker(), protocolFile);
if (extraDeclarations) {
protocolDts += extraDeclarations;
@ -1,18 +1,20 @@
/// <reference types="node"/>
import { normalize, relative } from "path";
import * as assert from "assert";
import assert from "assert";
import { readFileSync, writeFileSync } from "fs";
import url from "url";

const __filename = url.fileURLToPath(new URL(import.meta.url));

/**
* A minimal description for a parsed package.json object.
*/
interface PackageJson {
* @typedef {{
name: string;
version: string;
keywords: string[];
}
}} PackageJson
*/

function main(): void {
function main() {
const args = process.argv.slice(2);
if (args.length < 3) {
const thisProgramName = relative(process.cwd(), __filename);
@ -28,7 +30,8 @@ function main(): void {

// Acquire the version from the package.json file and modify it appropriately.
const packageJsonFilePath = normalize(args[1]);
const packageJsonValue: PackageJson = JSON.parse(readFileSync(packageJsonFilePath).toString());
/** @type {PackageJson} */
const packageJsonValue = JSON.parse(readFileSync(packageJsonFilePath).toString());

const { majorMinor, patch } = parsePackageJsonVersion(packageJsonValue.version);
const prereleasePatch = getPrereleasePatch(tag, patch);
@ -53,17 +56,25 @@ function main(): void {
}

/* eslint-disable no-null/no-null */
function updateTsFile(tsFilePath: string, tsFileContents: string, majorMinor: string, patch: string, nightlyPatch: string): string {
/**
* @param {string} tsFilePath
* @param {string} tsFileContents
* @param {string} majorMinor
* @param {string} patch
* @param {string} nightlyPatch
* @returns {string}
*/
function updateTsFile(tsFilePath, tsFileContents, majorMinor, patch, nightlyPatch) {
const majorMinorRgx = /export const versionMajorMinor = "(\d+\.\d+)"/;
const majorMinorMatch = majorMinorRgx.exec(tsFileContents);
assert(majorMinorMatch !== null, `The file '${tsFilePath}' seems to no longer have a string matching '${majorMinorRgx}'.`);
const parsedMajorMinor = majorMinorMatch![1];
const parsedMajorMinor = majorMinorMatch[1];
assert(parsedMajorMinor === majorMinor, `versionMajorMinor does not match. ${tsFilePath}: '${parsedMajorMinor}'; package.json: '${majorMinor}'`);

const versionRgx = /export const version(?:: string)? = `\$\{versionMajorMinor\}\.(\d)(-\w+)?`;/;
const patchMatch = versionRgx.exec(tsFileContents);
assert(patchMatch !== null, `The file '${tsFilePath}' seems to no longer have a string matching '${versionRgx.toString()}'.`);
const parsedPatch = patchMatch![1];
const parsedPatch = patchMatch[1];
if (parsedPatch !== patch) {
throw new Error(`patch does not match. ${tsFilePath}: '${parsedPatch}; package.json: '${patch}'`);
}
@ -71,16 +82,25 @@ function updateTsFile(tsFilePath: string, tsFileContents: string, majorMinor: st
return tsFileContents.replace(versionRgx, `export const version: string = \`\${versionMajorMinor}.${nightlyPatch}\`;`);
}

function parsePackageJsonVersion(versionString: string): { majorMinor: string, patch: string } {
/**
* @param {string} versionString
* @returns {{ majorMinor: string, patch: string }}
*/
function parsePackageJsonVersion(versionString) {
const versionRgx = /(\d+\.\d+)\.(\d+)($|\-)/;
const match = versionString.match(versionRgx);
assert(match !== null, "package.json 'version' should match " + versionRgx.toString());
return { majorMinor: match![1], patch: match![2] };
return { majorMinor: match[1], patch: match[2] };
}
/* eslint-enable no-null/no-null */

/** e.g. 0-dev.20170707 */
function getPrereleasePatch(tag: string, plainPatch: string): string {
/**
* e.g. 0-dev.20170707
* @param {string} tag
* @param {string} plainPatch
* @returns {string}
*/
function getPrereleasePatch(tag, plainPatch) {
// We're going to append a representation of the current time at the end of the current version.
// String.prototype.toISOString() returns a 24-character string formatted as 'YYYY-MM-DDTHH:mm:ss.sssZ',
// but we'd prefer to just remove separators and limit ourselves to YYYYMMDD.
@ -1,8 +1,7 @@
/* eslint-disable */
// @ts-check

/** Run via:
node scripts/createPlaygroundBuild.js
node scripts/createPlaygroundBuild.mjs
*/

// This script does two things:
@ -20,12 +19,13 @@
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

const path = require('path');
const fs = require('fs');
const child_process = require('child_process');
const http = require('http');
const url = require('url');
const nodeFetch = require("node-fetch").default
import path from 'path';
import fs from 'fs';
import child_process from 'child_process';
import http from 'http';
import url from 'url';
import nodeFetch from "node-fetch";
import assert from 'assert';

function updateTSDist() {
// This code is a direct port of a script from monaco-typescript
@ -169,6 +169,9 @@ function updateTSDist() {
})();

function importLibs() {
/**
* @param {string} name
*/
function readLibFile(name) {
const srcPath = path.join(TYPESCRIPT_LIB_SOURCE, name);
return fs.readFileSync(srcPath).toString();
@ -193,6 +196,7 @@ function updateTSDist() {
const dtsFiles = fs.readdirSync(TYPESCRIPT_LIB_SOURCE).filter((f) => f.includes('lib.'));
while (dtsFiles.length > 0) {
const name = dtsFiles.shift();
assert(name !== undefined);
const output = readLibFile(name).replace(/\r\n/g, '\n');
strLibResult += `libFileMap['${name}'] = "${escapeText(output)}";\n`;
strIndexResult += `libFileSet['${name}'] = true;\n`;
@ -204,6 +208,7 @@ function updateTSDist() {

/**
* Escape text such that it can be used in a javascript string enclosed by double quotes (")
* @param {string} text
*/
function escapeText(text) {
// See http://www.javascriptkit.com/jsref/escapesequence.shtml
@ -265,6 +270,9 @@ function updateTSDist() {
return resultPieces.join('');
}

/**
* @param {string} str
*/
function stripSourceMaps(str) {
return str.replace(/\/\/# sourceMappingURL[^\n]+/gm, '');
}
@ -283,14 +291,15 @@ fs.watchFile(services, () =>{
// closest version to your dev build
let latestStable = "4.3.2"
nodeFetch('https://typescript.azureedge.net/indexes/releases.json').then(req => req.json()).then(releases => {
latestStable = releases.versions.pop()
latestStable = /** @type {any} */ (releases).versions.pop()
});

http.createServer(function (req, res) {
res.setHeader("Access-Control-Allow-Origin", "*")

assert(req.url);
const incoming = url.parse(req.url)
if (incoming.path.endsWith("typescriptServices.js")) {
if (incoming.path && incoming.path.endsWith("typescriptServices.js")) {
// Use the built version
res.writeHead(200, {"Content-Type": "text/javascript"});
const amdPath = path.join(__dirname, '../internal/lib/typescriptServices-amd.js');
@ -1,7 +1,7 @@
import * as fs from "fs";
import * as fsPromises from "fs/promises";
import * as _glob from "glob";
import * as util from "util";
import fs from "fs";
import fsPromises from "fs/promises";
import _glob from "glob";
import util from "util";

const glob = util.promisify(_glob);

@ -24,7 +24,7 @@ async function checkErrorBaselines() {
fs.readFile(baseDir + f, "utf-8", (err, baseline) => {
if (err) throw err;

let g: RegExpExecArray | null;
let g;
while (g = errRegex.exec(baseline)) {
const errCode = +g[1];
const msg = keys.filter(k => messages[k].code === errCode)[0];
@ -48,8 +48,8 @@ async function checkSourceFiles() {
const data = await fsPromises.readFile("src/compiler/diagnosticInformationMap.generated.ts", "utf-8");

const errorRegexp = /\s(\w+): \{ code/g;
const errorNames: string[] = [];
let errMatch: RegExpExecArray | null;
const errorNames = [];
let errMatch;
while (errMatch = errorRegexp.exec(data)) {
errorNames.push(errMatch[1]);
}
@ -1,5 +1,5 @@
const { AST_NODE_TYPES, TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "boolean-trivia",
@ -1,5 +1,5 @@
const { AST_NODE_TYPES, TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "debug-assert",
@ -1,5 +1,5 @@
const { TSESTree, AST_NODE_TYPES } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "no-double-space",
@ -1,5 +1,5 @@
const { TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "no-in-operator",
@ -1,5 +1,5 @@
const { TSESTree, AST_NODE_TYPES } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "no-keywords",
@ -1,5 +1,5 @@
const { TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "no-type-assertion-whitespace",
@ -1,5 +1,5 @@
const { TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "object-literal-surrounding-space",
@ -1,5 +1,5 @@
const { AST_NODE_TYPES, TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "one-namespace-per-file",
@ -1,5 +1,5 @@
const { AST_NODE_TYPES, TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "only-arrow-functions",
@ -1,5 +1,5 @@
const { TSESTree } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "simple-indent",
@ -1,5 +1,5 @@
const { TSESTree, AST_TOKEN_TYPES } = require("@typescript-eslint/utils");
const { createRule } = require("./utils");
const { createRule } = require("./utils.cjs");

module.exports = createRule({
name: "type-operator-spacing",
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/boolean-trivia");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/boolean-trivia.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/debug-assert");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/debug-assert.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/no-double-space");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/no-double-space.cjs");

const ruleTester = new RuleTester({
parser: require.resolve("@typescript-eslint/parser"),
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/no-in-operator");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/no-in-operator.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/no-keywords");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/no-keywords.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/no-type-assertion-whitespace");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/no-type-assertion-whitespace.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/object-literal-surrounding-space");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/object-literal-surrounding-space.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/only-arrow-functions");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/only-arrow-functions.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/simple-indent");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/simple-indent.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -0,0 +1,6 @@
const path = require("path");
const { TSESLint } = require("@typescript-eslint/utils");

module.exports.ROOT_DIR = path.join(process.cwd(), "scripts", "eslint", "tests", "fixtures");
module.exports.FILENAME = path.join(module.exports.ROOT_DIR, "file.ts");
module.exports.RuleTester = TSESLint.RuleTester;
@ -1,6 +0,0 @@
import * as path from "path";
import { TSESLint } from "@typescript-eslint/utils";

export const ROOT_DIR = path.join(process.cwd(), "scripts", "eslint", "tests", "fixtures");
export const FILENAME = path.join(ROOT_DIR, "file.ts");
export const RuleTester = TSESLint.RuleTester;
@ -1,5 +1,5 @@
import { RuleTester } from "./support/RuleTester";
import rule = require("../rules/type-operator-spacing");
const { RuleTester } = require("./support/RuleTester.cjs");
const rule = require("../rules/type-operator-spacing.cjs");

const ruleTester = new RuleTester({
parserOptions: {
@ -1,30 +0,0 @@
{
"compilerOptions": {
"lib": [
"es6"
],
"module": "commonjs",
"target": "es6",
"outDir": "./built",
"declaration": false,

"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"moduleResolution": "node",
"allowUnusedLabels": false,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noPropertyAccessFromIndexSignature": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"allowJs": true,
"checkJs": true
},

"include": [
"rules",
"tests"
]
}
@ -1,15 +1,19 @@
// @ts-check
const assert = require("assert");
const Mocha = require("mocha");
const path = require("path");
const fs = require("fs");
const os = require("os");

const failingHookRegExp = /^(.*) "(before|after) (all|each)" hook$/;
/** @typedef {{
file?: string;
keepFailed?: boolean;
reporter?: Mocha.ReporterConstructor | keyof Mocha.reporters;
reporterOptions?: any; // TODO(jakebailey): what?
}} ReporterOptions */

/**
* .failed-tests reporter
*
* @typedef {Object} ReporterOptions
* @property {string} [file]
* @property {boolean} [keepFailed]
* @property {string|Mocha.ReporterConstructor} [reporter]
@ -24,7 +28,7 @@ class FailedTestsReporter extends Mocha.reporters.Base {
super(runner, options);
if (!runner) return;

const reporterOptions = this.reporterOptions = options.reporterOptions || {};
const reporterOptions = this.reporterOptions = options?.reporterOptions || {};
if (reporterOptions.file === undefined) reporterOptions.file = ".failed-tests";
if (reporterOptions.keepFailed === undefined) reporterOptions.keepFailed = false;
if (reporterOptions.reporter) {
@ -67,7 +71,7 @@ class FailedTestsReporter extends Mocha.reporters.Base {
* @param {ReadonlyArray<Mocha.Test>} passes
* @param {ReadonlyArray<Mocha.Test | Mocha.Hook>} failures
* @param {boolean} keepFailed
* @param {(err?: NodeJS.ErrnoException) => void} done
* @param {(err?: NodeJS.ErrnoException | null) => void} done
*/
static writeFailures(file, passes, failures, keepFailed, done) {
const failingTests = new Set(fs.existsSync(file) ? readTests() : undefined);
@ -78,6 +82,7 @@ class FailedTestsReporter extends Mocha.reporters.Base {
if (failingTests.size > 0 && !keepFailed) {
for (const test of passes) {
failingTests.delete(test.fullTitle().trim());
assert(test.parent);
possiblyPassingSuites.add(test.parent.fullTitle().trim());
}
}
@ -86,6 +91,7 @@ class FailedTestsReporter extends Mocha.reporters.Base {
// containing suite as failing. If the suite for a test or hook was
// possibly passing then it is now definitely failing.
for (const test of failures) {
assert(test.parent);
const suiteTitle = test.parent.fullTitle().trim();
if (test.type === "test") {
failingTests.add(test.fullTitle().trim());
@ -126,8 +132,11 @@ class FailedTestsReporter extends Mocha.reporters.Base {
/**
* @param {number} failures
* @param {(failures: number) => void} [fn]
* @override
*/
done(failures, fn) {
assert(this.reporterOptions);
assert(this.reporterOptions.file);
FailedTestsReporter.writeFailures(this.reporterOptions.file, this.passes, this.failures, this.reporterOptions.keepFailed || this.stats.tests === 0, (err) => {
const reporter = this.reporter;
if (reporter && reporter.done) {
@ -0,0 +1,52 @@
export = FailedTestsReporter;
/** @typedef {{
file?: string;
keepFailed?: boolean;
reporter?: Mocha.ReporterConstructor | keyof Mocha.reporters;
reporterOptions?: any; // TODO(jakebailey): what?
}} ReporterOptions */
/**
* .failed-tests reporter
*
* @property {string} [file]
* @property {boolean} [keepFailed]
* @property {string|Mocha.ReporterConstructor} [reporter]
* @property {*} [reporterOptions]
*/
declare class FailedTestsReporter extends Mocha.reporters.Base {
/**
* @param {string} file
* @param {ReadonlyArray<Mocha.Test>} passes
* @param {ReadonlyArray<Mocha.Test | Mocha.Hook>} failures
* @param {boolean} keepFailed
* @param {(err?: NodeJS.ErrnoException | null) => void} done
*/
static writeFailures(file: string, passes: ReadonlyArray<Mocha.Test>, failures: ReadonlyArray<Mocha.Test | Mocha.Hook>, keepFailed: boolean, done: (err?: NodeJS.ErrnoException | null) => void): void;
/**
* @param {Mocha.Runner} runner
* @param {{ reporterOptions?: ReporterOptions }} [options]
*/
constructor(runner: Mocha.Runner, options?: {
reporterOptions?: ReporterOptions | undefined;
} | undefined);
reporterOptions: ReporterOptions | undefined;
reporter: Mocha.reporters.Base | undefined;
/** @type {Mocha.Test[]} */
passes: Mocha.Test[];
/**
* @param {number} failures
* @param {(failures: number) => void} [fn]
* @override
*/
override done(failures: number, fn?: ((failures: number) => void) | undefined): void;
}
declare namespace FailedTestsReporter {
export { ReporterOptions };
}
import Mocha = require("mocha");
type ReporterOptions = {
file?: string;
keepFailed?: boolean;
reporter?: Mocha.ReporterConstructor | keyof typeof Mocha.reporters;
reporterOptions?: any;
};
@ -1,22 +0,0 @@
import * as Mocha from "mocha";

export = FailedTestsReporter;

declare class FailedTestsReporter extends Mocha.reporters.Base {
passes: Mocha.Test[];
failures: Mocha.Test[];
reporterOptions: FailedTestsReporter.ReporterOptions;
reporter?: Mocha.reporters.Base;
constructor(runner: Mocha.Runner, options?: { reporterOptions?: FailedTestsReporter.ReporterOptions });
static writeFailures(file: string, passes: readonly Mocha.Test[], failures: readonly Mocha.Test[], keepFailed: boolean, done: (err?: NodeJS.ErrnoException) => void): void;
done(failures: number, fn?: (failures: number) => void): void;
}

declare namespace FailedTestsReporter {
interface ReporterOptions {
file?: string;
keepFailed?: boolean;
reporter?: string | Mocha.ReporterConstructor;
reporterOptions?: any;
}
}
@ -1,14 +1,14 @@
// @ts-check
// This file requires a modern version of node 14+, and grep to be available.

// node scripts/find-unused-diagnostic-messages.mjs
import { readFileSync } from "fs";
import {EOL} from "os";
import { EOL } from "os";
import { execSync } from "child_process";

const diags = readFileSync("src/compiler/diagnosticInformationMap.generated.ts", "utf8");
const startOfDiags = diags.split("export const Diagnostics")[1];

/** @type {string[]} */
const missingNames = [];
startOfDiags.split(EOL).forEach(line => {
if (!line.includes(":")) return;
@ -1,8 +1,8 @@
import * as fs from "fs";
import * as path from "path";
import * as xml2js from "xml2js";
import fs from "fs";
import path from "path";
import xml2js from "xml2js";

function main(): void {
function main() {
const args = process.argv.slice(2);
if (args.length !== 3) {
console.log("Usage:");
@ -25,7 +25,10 @@ function main(): void {

return;

function visitDirectory(name: string) {
/**
* @param {string} name
*/
function visitDirectory(name) {
const inputFilePath = path.join(inputPath, name, "diagnosticMessages", "diagnosticMessages.generated.json.lcl");

fs.readFile(inputFilePath, (err, data) => {
@ -63,8 +66,10 @@ function main(): void {
*
* Most of the languages we support are neutral locales, so we want to use the language name.
* There are three exceptions, zh-CN, zh-TW and pt-BR.
*
* @param {string} localeName
*/
function getPreferredLocaleName(localeName: string) {
function getPreferredLocaleName(localeName) {
switch (localeName) {
case "zh-CN":
case "zh-TW":
@ -75,15 +80,22 @@ function main(): void {
}
}

function handleError(err: null | object) {
/**
* @param {null | object} err
*/
function handleError(err) {
if (err) {
console.error(err);
process.exit(1);
}
}

function xmlObjectToString(o: any) {
const out: any = {};
/**
* @param {any} o
*/
function xmlObjectToString(o) {
/** @type {any} */
const out = {};
for (const item of o.LCX.Item[0].Item[0].Item) {
let ItemId = item.$.ItemId;
let val = item.Str[0].Tgt ? item.Str[0].Tgt[0].Val[0] : item.Str[0].Val[0];
@ -104,7 +116,11 @@ function main(): void {
}


function ensureDirectoryExists(directoryPath: string, action: () => void) {
/**
* @param {string} directoryPath
* @param {() => void} action
*/
function ensureDirectoryExists(directoryPath, action) {
fs.exists(directoryPath, exists => {
if (!exists) {
const basePath = path.dirname(directoryPath);
@ -116,14 +132,21 @@ function main(): void {
});
}

function writeFile(fileName: string, contents: string) {
/**
* @param {string} fileName
* @param {string} contents
*/
function writeFile(fileName, contents) {
ensureDirectoryExists(path.dirname(fileName), () => {
fs.writeFile(fileName, contents, handleError);
});
}

function objectToList(o: Record<string, string>) {
const list: { key: string, value: string }[] = [];
/**
* @param {Record<string, string>} o
*/
function objectToList(o) {
const list = [];
for (const key in o) {
list.push({ key, value: o[key] });
}
@ -142,7 +165,11 @@ function main(): void {
));
});

function getItemXML(key: string, value: string) {
/**
* @param {string} key
* @param {string} value
*/
function getItemXML(key, value) {
// escape entrt value
value = value.replace(/]/, "]5D;");

@ -155,7 +182,10 @@ function main(): void {
</Item>`;
}

function getLCGFileXML(items: string) {
/**
* @param {string} items
*/
function getLCGFileXML(items) {
return `<?xml version="1.0" encoding="utf-8"?>
<LCX SchemaVersion="6.0" Name="diagnosticMessages.generated.json" PsrId="306" FileType="1" SrcCul="en-US" xmlns="http://schemas.microsoft.com/locstudio/2006/6/lcx">
<OwnedComments>
@ -1,17 +1,11 @@
import * as fs from "fs";
import * as path from "path";
import * as os from "os";
import * as childProcess from "child_process";
import url from "url";


interface Map<T> {
[key: string]: T;
}

declare let process: {
argv: string[];
env: Map<string>;
exit(exitCode?: number): void;
};
const __filename = url.fileURLToPath(new URL(import.meta.url));
const __dirname = path.dirname(__filename);

main();
function main() {
@ -35,25 +29,41 @@ function main() {
importDefinitelyTypedTests(tscPath, rwcTestPath, resolvedDefinitelyTypedRoot);
}

function filePathEndsWith(path: string, endingString: string): boolean {
/**
* @param {string} path
* @param {string} endingString
* @returns {boolean}
*/
function filePathEndsWith(path, endingString) {
const pathLen = path.length;
const extLen = endingString.length;
return pathLen > extLen && path.substr(pathLen - extLen, extLen).toLocaleLowerCase() === endingString.toLocaleLowerCase();
}

function copyFileSync(source: string, destination: string) {
/**
* @param {string} source
* @param {string} destination
*/
function copyFileSync(source, destination) {
const text = fs.readFileSync(source);
fs.writeFileSync(destination, text);
}

function importDefinitelyTypedTest(tscPath: string, rwcTestPath: string, testCaseName: string, testFiles: string[], responseFile: string | undefined) {
/**
* @param {string} tscPath
* @param {string} rwcTestPath
* @param {string} testCaseName
* @param {string[]} testFiles
* @param {string | undefined} responseFile
*/
function importDefinitelyTypedTest(tscPath, rwcTestPath, testCaseName, testFiles, responseFile) {
let cmd = "node " + tscPath + " --module commonjs " + testFiles.join(" ");
if (responseFile) {
cmd += " @" + responseFile;
}

const testDirectoryName = testCaseName + "_" + Math.floor((Math.random() * 10000) + 1);
const testDirectoryPath = path.join(process.env.temp, testDirectoryName);
const testDirectoryPath = path.join(os.tmpdir(), testDirectoryName);
if (fs.existsSync(testDirectoryPath)) {
throw new Error("Could not create test directory");
}
@ -94,13 +104,18 @@ function importDefinitelyTypedTest(tscPath: string, rwcTestPath: string, testCas
}
//console.log("\r\n");

}).on("error", (error: any) => {
}).on("error", (error) => {
console.log("==> error " + JSON.stringify(error));
console.log("\r\n");
});
}

function importDefinitelyTypedTests(tscPath: string, rwcTestPath: string, definitelyTypedRoot: string): void {
/**
* @param {string} tscPath
* @param {string} rwcTestPath
* @param {string} definitelyTypedRoot
*/
function importDefinitelyTypedTests(tscPath, rwcTestPath, definitelyTypedRoot) {
fs.readdir(definitelyTypedRoot, (err, subDirectories) => {
if (err) {
throw err;
@ -120,9 +135,12 @@ function importDefinitelyTypedTests(tscPath: string, rwcTestPath: string, defini
throw err;
}

const tsFiles: string[] = [];
const testFiles: string[] = [];
let paramFile: string | undefined;
/** @type {string[]} */
const tsFiles = [];
/** @type {string[]} */
const testFiles = [];
/** @type {string | undefined} */
let paramFile;

for (const filePath of files.map(f => path.join(directoryPath, f))) {
if (filePathEndsWith(filePath, ".ts")) {
@ -1,18 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "ES5",
"outDir": "./",
"rootDir": ".",
"newLine": "lf",
"noImplicitAny": true,
"strictNullChecks": true,
"sourceMap": false
},
"files": [
"../typings/node/node.d.ts",
"importDefinitelyTypedTests.ts"
],
"exclude": [
]
}
@ -1,20 +0,0 @@
var fs = require("fs");
var path = require("path");

var hooks = [
"post-checkout"
];

hooks.forEach(function (hook) {
var hookInSourceControl = path.resolve(__dirname, "hooks", hook);

if (fs.existsSync(hookInSourceControl)) {
var hookInHiddenDirectory = path.resolve(__dirname, "..", ".git", "hooks", hook);

if (fs.existsSync(hookInHiddenDirectory)) {
fs.unlinkSync(hookInHiddenDirectory);
}

fs.linkSync(hookInSourceControl, hookInHiddenDirectory);
}
});
@ -0,0 +1,25 @@
import fs from "fs";
import path from "path";
import url from "url";
import { findUpRoot } from "./build/findUpDir.mjs";

const __filename = url.fileURLToPath(new URL(import.meta.url));
const __dirname = path.dirname(__filename);

const hooks = [
"post-checkout"
];

hooks.forEach((hook) => {
const hookInSourceControl = path.resolve(__dirname, "hooks", hook);

if (fs.existsSync(hookInSourceControl)) {
const hookInHiddenDirectory = path.resolve(findUpRoot(), ".git", "hooks", hook);

if (fs.existsSync(hookInHiddenDirectory)) {
fs.unlinkSync(hookInHiddenDirectory);
}

fs.linkSync(hookInSourceControl, hookInHiddenDirectory);
}
});
@ -1,11 +1,11 @@
/// <reference lib="esnext.asynciterable" />
// Must reference esnext.asynciterable lib, since octokit uses AsyncIterable internally
/// <reference types="node" />

import { Octokit } from "@octokit/rest";
import { runSequence } from "./run-sequence";
import * as fs from "fs";
import * as path from "path";
import { runSequence } from "./run-sequence.mjs";
import fs from "fs";
import path from "path";
import url from "url";

const __filename = url.fileURLToPath(new URL(import.meta.url));
const __dirname = path.dirname(__filename);

const userName = process.env.GH_USERNAME;
const reviewers = process.env.REQUESTING_USER ? [process.env.REQUESTING_USER] : ["weswigham", "RyanCavanaugh"];
@ -1,12 +1,8 @@
/// <reference lib="esnext.asynciterable" />
/// <reference lib="es2015.promise" />
// Must reference esnext.asynciterable lib, since octokit uses AsyncIterable internally
import { Octokit } from "@octokit/rest";
import { runSequence } from "./run-sequence";
import { runSequence } from "./run-sequence.mjs";

const userName = process.env.GH_USERNAME || "typescript-bot";
const reviewers = process.env.REQUESTING_USER ? [process.env.REQUESTING_USER] : ["weswigham", "sandersn", "RyanCavanaugh"];
const now = new Date();
const masterBranchname = `user-baseline-updates`;
const targetBranch = process.env.TARGET_BRANCH || "main";
const branchName = process.env.TARGET_FORK?.toLowerCase() === "microsoft" && (targetBranch === "main" || targetBranch === "refs/heads/main")
@ -22,7 +18,7 @@ runSequence([
["node", ["./node_modules/gulp/bin/gulp.js", "baseline-accept"]], // accept baselines
["git", ["checkout", "-b", branchName]], // create a branch
["git", ["add", "."]], // Add all changes
["git", ["commit", "-m", `"Update user baselines${+process.env.SOURCE_ISSUE! === 33716 ? " +cc @sandersn" : ""}"`]], // Commit all changes (ping nathan if we would post to CI thread)
["git", ["commit", "-m", `"Update user baselines${+(process.env.SOURCE_ISSUE ?? 0) === 33716 ? " +cc @sandersn" : ""}"`]], // Commit all changes (ping nathan if we would post to CI thread)
["git", ["push", "--set-upstream", "fork", branchName, "-f"]] // push the branch
]);
@ -1,10 +1,8 @@
// @ts-check
/// <reference lib="esnext.asynciterable" />
// Must reference esnext.asynciterable lib, since octokit uses AsyncIterable internally
const { Octokit } = require("@octokit/rest");
const fs = require("fs");
const ado = require("azure-devops-node-api");
const { default: fetch } = require("node-fetch");
import { Octokit } from "@octokit/rest";
import fs from "fs";
import ado from "azure-devops-node-api";
import fetch from "node-fetch";
import assert from "assert";


async function main() {
@ -15,7 +13,7 @@ async function main() {
if (!requester) throw new Error("REQUESTING_USER environment variable not set.");

const buildId = process.env.BUILD_BUILDID;
if (!requester) throw new Error("BUILD_BUILDID environment variable not set.");
if (!buildId) throw new Error("BUILD_BUILDID environment variable not set.");

const postedComment = process.env.STATUS_COMMENT;
if (!postedComment) throw new Error("STATUS_COMMENT environment variable not set.");
@ -36,10 +34,11 @@ async function main() {
const cli = new ado.WebApi("https://typescript.visualstudio.com/defaultcollection", ado.getHandlerFromToken("")); // Empty token, anon auth
const build = await cli.getBuildApi();
const artifact = await build.getArtifact("typescript", +buildId, "benchmark");
assert(artifact.resource?.url);
const updatedUrl = new URL(artifact.resource.url);
updatedUrl.search = `artifactName=benchmark&fileId=${artifact.resource.data}&fileName=manifest`;
const resp = await (await fetch(`${updatedUrl}`)).json();
for (const file of resp.items) {
for (const file of /** @type {any} */ (resp).items) {
if (/[\\/]linux\.benchmark$/.test(file.path)) {
const benchmarkUrl = new URL(artifact.resource.url);
benchmarkUrl.search = `artifactName=benchmark&fileId=${file.blob.id}&fileName=linux.benchmark`;
@ -1,9 +1,7 @@
|
|||
// @ts-check
|
||||
/// <reference lib="esnext.asynciterable" />
|
||||
// Must reference esnext.asynciterable lib, since octokit uses AsyncIterable internally
|
||||
const { Octokit } = require("@octokit/rest");
|
||||
const ado = require("azure-devops-node-api");
|
||||
const { default: fetch } = require("node-fetch");
|
||||
import { Octokit } from "@octokit/rest";
|
||||
import assert from "assert";
|
||||
import ado from "azure-devops-node-api";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
async function main() {
|
||||
if (!process.env.SOURCE_ISSUE) {
|
||||
|
@ -16,10 +14,11 @@ async function main() {
|
|||
const cli = new ado.WebApi("https://typescript.visualstudio.com/defaultcollection", ado.getHandlerFromToken("")); // Empty token, anon auth
|
||||
const build = await cli.getBuildApi();
|
||||
const artifact = await build.getArtifact("typescript", +process.env.BUILD_BUILDID, "tgz");
|
||||
assert(artifact.resource?.url);
|
||||
const updatedUrl = new URL(artifact.resource.url);
|
||||
updatedUrl.search = `artifactName=tgz&fileId=${artifact.resource.data}&fileName=manifest`;
|
||||
const resp = await (await fetch(`${updatedUrl}`)).json();
|
||||
const file = resp.items[0];
|
||||
const file = /** @type {any} */ (resp).items[0];
|
||||
const tgzUrl = new URL(artifact.resource.url);
|
||||
tgzUrl.search = `artifactName=tgz&fileId=${file.blob.id}&fileName=${file.path}`;
|
||||
const link = "" + tgzUrl;
|
|
@@ -1,25 +1,29 @@
import * as path from "path";
import * as fs from "fs";
import path from "path";
import fs from "fs";

interface DiagnosticDetails {
/** @typedef {{
category: string;
code: number;
reportsUnnecessary?: {};
reportsDeprecated?: {};
isEarly?: boolean;
elidedInCompatabilityPyramid?: boolean;
}
}} DiagnosticDetails */

type InputDiagnosticMessageTable = Map<string, DiagnosticDetails>;
/** @typedef {Map<string, DiagnosticDetails>} InputDiagnosticMessageTable */

function main(): void {
function main() {
if (process.argv.length < 3) {
console.log("Usage:");
console.log("\tnode processDiagnosticMessages.js <diagnostic-json-input-file>");
console.log("\tnode processDiagnosticMessages.mjs <diagnostic-json-input-file>");
return;
}

function writeFile(fileName: string, contents: string) {
/**
* @param {string} fileName
* @param {string} contents
*/
function writeFile(fileName, contents) {
fs.writeFile(path.join(path.dirname(inputFilePath), fileName), contents, { encoding: "utf-8" }, err => {
if (err) throw err;
});

@@ -29,9 +33,11 @@ function main(): void {
console.log(`Reading diagnostics from ${inputFilePath}`);
const inputStr = fs.readFileSync(inputFilePath, { encoding: "utf-8" });

const diagnosticMessagesJson: { [key: string]: DiagnosticDetails } = JSON.parse(inputStr);
/** @type {{ [key: string]: DiagnosticDetails }} */
const diagnosticMessagesJson = JSON.parse(inputStr);

const diagnosticMessages: InputDiagnosticMessageTable = new Map();
/** @type {InputDiagnosticMessageTable} */
const diagnosticMessages = new Map();
for (const key in diagnosticMessagesJson) {
if (Object.hasOwnProperty.call(diagnosticMessagesJson, key)) {
diagnosticMessages.set(key, diagnosticMessagesJson[key]);

@@ -49,8 +55,12 @@ function main(): void {
writeFile("diagnosticMessages.generated.json", messageOutput);
}

function checkForUniqueCodes(diagnosticTable: InputDiagnosticMessageTable) {
const allCodes: { [key: number]: true | undefined } = [];
/**
* @param {InputDiagnosticMessageTable} diagnosticTable
*/
function checkForUniqueCodes(diagnosticTable) {
/** @type {Record<number, true | undefined>} */
const allCodes = [];
diagnosticTable.forEach(({ code }) => {
if (allCodes[code]) {
throw new Error(`Diagnostic code ${code} appears more than once.`);

@@ -59,7 +69,13 @@ function checkForUniqueCodes(diagnosticTable: InputDiagnosticMessageTable) {
});
}

function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, inputFilePathRel: string, thisFilePathRel: string): string {
/**
* @param {InputDiagnosticMessageTable} messageTable
* @param {string} inputFilePathRel
* @param {string} thisFilePathRel
* @returns {string}
*/
function buildInfoFileOutput(messageTable, inputFilePathRel, thisFilePathRel) {
let result =
"// <auto-generated />\r\n" +
"// generated from '" + inputFilePathRel + "' in '" + thisFilePathRel.replace(/\\/g, "/") + "'\r\n" +

@@ -83,7 +99,11 @@ function buildInfoFileOutput(messageTable: InputDiagnosticMessageTable, inputFil
return result;
}

function buildDiagnosticMessageOutput(messageTable: InputDiagnosticMessageTable): string {
/**
* @param {InputDiagnosticMessageTable} messageTable
* @returns {string}
*/
function buildDiagnosticMessageOutput(messageTable) {
let result = "{";
messageTable.forEach(({ code }, name) => {
const propName = convertPropertyName(name);

@@ -99,11 +119,21 @@ function buildDiagnosticMessageOutput(messageTable: InputDiagnosticMessageTable)
return result;
}

function createKey(name: string, code: number): string {
/**
*
* @param {string} name
* @param {number} code
* @returns {string}
*/
function createKey(name, code) {
return name.slice(0, 100) + "_" + code;
}

function convertPropertyName(origName: string): string {
/**
* @param {string} origName
* @returns {string}
*/
function convertPropertyName(origName) {
let result = origName.split("").map(char => {
if (char === "*") return "_Asterisk";
if (char === "/") return "_Slash";
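The hunks above show the core conversion used throughout this commit: TypeScript-only declarations (interfaces, type aliases, parameter and return annotations) become JSDoc that checkJs can verify in place. A minimal sketch of the same pattern with illustrative names, not code from the repo:

/** @typedef {{ category: string; code: number }} ExampleDetails */
/** @typedef {Map<string, ExampleDetails>} ExampleTable */

/**
 * @param {ExampleTable} table
 * @returns {string}
 */
function describeTable(table) {
    let result = "";
    // Map#forEach passes (value, key), so the details come first.
    table.forEach((details, name) => {
        result += `${name}: ${details.code}\n`;
    });
    return result;
}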
@@ -1,9 +1,11 @@
/// <reference types="node" />
import childProcess from "child_process";
import fs from "fs-extra";
import path from "path";
import glob from "glob";
import url from "url";

import * as childProcess from "child_process";
import * as fs from "fs-extra";
import * as path from "path";
import * as glob from "glob";
const __filename = url.fileURLToPath(new URL(import.meta.url));
const __dirname = path.dirname(__filename);

const root = path.join(__dirname, "..");
const source = path.join(root, "built/local");

@@ -45,7 +47,7 @@ async function copyTypesMap() {
}

async function buildProtocol() {
const protocolScript = path.join(__dirname, "buildProtocol.js");
const protocolScript = path.join(__dirname, "buildProtocol.mjs");
if (!fs.existsSync(protocolScript)) {
throw new Error(`Expected protocol script ${protocolScript} to exist`);
}

@@ -80,16 +82,26 @@ async function writeGitAttributes() {
await fs.writeFile(path.join(dest, ".gitattributes"), `* text eol=lf`, "utf-8");
}

async function copyWithCopyright(fileName: string, destName = fileName) {
/**
* @param {string} fileName
* @param {string} destName
*/
async function copyWithCopyright(fileName, destName = fileName) {
const content = await fs.readFile(path.join(source, fileName), "utf-8");
await fs.writeFile(path.join(dest, destName), copyright + "\n" + content);
}

async function copyFromBuiltLocal(fileName: string) {
/**
* @param {string} fileName
*/
async function copyFromBuiltLocal(fileName) {
await fs.copy(path.join(source, fileName), path.join(dest, fileName));
}

async function copyFilesWithGlob(pattern: string) {
/**
* @param {string} pattern
*/
async function copyFilesWithGlob(pattern) {
const files = glob.sync(pattern, { cwd: source }).map(f => path.basename(f));
for (const f of files) {
await copyFromBuiltLocal(f);

@@ -97,7 +109,11 @@ async function copyFilesWithGlob(pattern: string) {
console.log(`Copied ${files.length} files matching pattern ${pattern}`);
}

async function exec(path: string, args: string[] = []) {
/**
* @param {string} path
* @param {string[]} args
*/
async function exec(path, args = []) {
const cmdLine = ["node", path, ...args].join(" ");
console.log(cmdLine);
childProcess.execSync(cmdLine);
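The two lines deriving __filename and __dirname above are the standard ESM replacement for the CommonJS globals, which do not exist in .mjs files. A stand-alone sketch of the shim (hypothetical file, not from the repo); import.meta.url is already a file URL string, so passing it straight to url.fileURLToPath is equivalent to wrapping it in new URL(...) first:

// Sketch only: recreate __dirname inside an ES module.
import path from "path";
import url from "url";

const __filename = url.fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
console.log(`script directory: ${__dirname}`);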
@@ -1,6 +1,8 @@

const MAX_UNICODE_CODEPOINT = 0x10FFFF;
/** @type {(c: string) => boolean} */
const isStart = c => /[\p{ID_Start}\u{2118}\u{212E}\u{309B}\u{309C}]/u.test(c); // Other_ID_Start explicitly included for back compat - see http://www.unicode.org/reports/tr31/#Introduction
/** @type {(c: string) => boolean} */
const isPart = c => /[\p{ID_Continue}\u{00B7}\u{0387}\u{19DA}\u{1369}\u{136A}\u{136B}\u{136C}\u{136D}\u{136E}\u{136F}\u{1370}\u{1371}]/u.test(c) || isStart(c); // Likewise for Other_ID_Continue
const parts = [];
let partsActive = false;

@@ -1,7 +1,5 @@
/// <reference lib="esnext.asynciterable" />
/// <reference lib="es2015.promise" />
import { Octokit } from "@octokit/rest";
import * as minimist from "minimist";
import minimist from "minimist";

const options = minimist(process.argv.slice(2), {
boolean: ["help"],

@@ -55,7 +53,10 @@ async function main() {
}
}

function printHelpAndExit(exitCode: number) {
/**
* @param {number} exitCode
*/
function printHelpAndExit(exitCode) {
console.log(`
usage: request-pr-review.js [options]

@@ -1,3 +0,0 @@
import { SpawnSyncOptions } from "child_process";

export function runSequence(tasks: [string, string[]][], opts?: SpawnSyncOptions): string;
@@ -1,10 +1,12 @@
// @ts-check
const cp = require("child_process");
import assert from "assert";
import cp from "child_process";

/**
* @param {[string, string[]][]} tasks
* @param {cp.SpawnSyncOptions} opts
* @returns {string}
*/
function runSequence(tasks, opts = { timeout: 100000, shell: true }) {
export function runSequence(tasks, opts = { timeout: 100000, shell: true }) {
let lastResult;
for (const task of tasks) {
console.log(`${task[0]} ${task[1].join(" ")}`);

@@ -13,7 +15,7 @@ function runSequence(tasks, opts = { timeout: 100000, shell: true }) {
console.log(result.stdout && result.stdout.toString());
lastResult = result;
}
return lastResult && lastResult.stdout && lastResult.stdout.toString();
const out = lastResult?.stdout?.toString();
assert(out !== undefined);
return out;
}

exports.runSequence = runSequence;
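With the JSDoc above, checkJs derives the signature of runSequence directly from run-sequence.mjs, which is presumably why the hand-written run-sequence.d.ts earlier in the diff could be deleted. A hypothetical consumer, not part of the commit:

// Sketch only: calling the ESM export from another .mjs script.
import { runSequence } from "./run-sequence.mjs";

const stdout = runSequence([["node", ["--version"]]]);
console.log(stdout);
// A .cjs script could not require() this module; it would need
// await import("./run-sequence.mjs") inside an async function instead.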
@@ -1,15 +1,28 @@
{
"compilerOptions": {
"lib": [
"es2018"
],
"module": "Node16",
"moduleResolution": "Node16",
"target": "es2018",
"noEmit": true,
// "declaration": true,
// "emitDeclarationOnly": true,

"strict": true,
"removeComments": false,
"declaration": false,
"sourceMap": true,
"newLine": "lf",
"target": "es6",
"module": "commonjs",
"types": ["node"],
"lib": ["es6", "scripthost"],
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"allowUnusedLabels": false,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"allowJs": true,
"checkJs": true
},

"include": ["*.ts"]
"include": [
"**/*.mjs", "**/*.cjs"
]
}
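The rewritten scripts tsconfig stops emitting anything and instead type-checks the .mjs/.cjs sources in place: allowJs and checkJs pull them into the program, noEmit keeps tsc from producing output, and module/moduleResolution set to Node16 make the compiler resolve the explicit file extensions used in the relative imports above. A sketch of what the checker now catches (hypothetical file, not in the repo):

// Sketch only: a checked .mjs file under allowJs + checkJs + noEmit.
/**
 * @param {number} a
 * @param {number} b
 */
export function add(a, b) {
    return a + b;
}

// tsc reports roughly: Argument of type 'string' is not assignable to parameter of type 'number'.
add(1, "2");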
@@ -1,7 +1,5 @@
// @ts-check
/// <reference lib="esnext.asynciterable" />
const { Octokit } = require("@octokit/rest");
const { runSequence } = require("./run-sequence");
import { Octokit } from "@octokit/rest";
import { runSequence } from "./run-sequence.mjs";

// The first is used by bot-based kickoffs, the second by automatic triggers
const triggeredPR = process.env.SOURCE_ISSUE || process.env.SYSTEM_PULLREQUEST_PULLREQUESTNUMBER;

@@ -46,7 +44,7 @@ async function main() {
const inputPR = await gh.pulls.get({ owner: "Microsoft", repo: "TypeScript", pull_number: num });
// GH calculates the rebaseable-ness of a PR into its target, so we can just use that here
if (!inputPR.data.rebaseable) {
if (+triggeredPR === num) {
if (+(triggeredPR ?? 0) === num) {
await gh.issues.createComment({
owner: "Microsoft",
repo: "TypeScript",

@@ -0,0 +1,122 @@
declare namespace Word {
export interface Collection<T> {
count: number;
item(index: number): T;
}

export interface Font {
bold: boolean;
italic: boolean;
subscript: boolean;
superscript: boolean;
}

export interface Find {
font: Font;
format: boolean;
replacement: Replacement;
style: any;
text: string;
clearFormatting(): void;
execute(
findText: string,
matchCase: boolean,
matchWholeWord: boolean,
matchWildcards: boolean,
matchSoundsLike: boolean,
matchAllWordForms: boolean,
forward: boolean,
wrap: number,
format: boolean,
replaceWith: string,
replace: number): boolean;
}

export interface Replacement {
font: Font;
style: any;
text: string;
clearFormatting(): void;
}

export interface ListFormat {
listLevelNumber: number;
listString: string;
}

export interface Column {
}

export interface Columns extends Collection<Column> {
}

export interface Table {
columns: Columns;
}

export interface Tables extends Collection<Table> {
}

export interface Range {
find: Find;
listFormat: ListFormat;
tables: Tables;
text: string;
textRetrievalMode: {
includeHiddenText: boolean;
}
words: Ranges;
}

export interface Ranges extends Collection<Range> {
}

export interface Style {
nameLocal: string;
}

export interface Paragraph {
alignment: number;
range: Range;
style: Style;
next(): Paragraph;
}

export interface Paragraphs extends Collection<Paragraph> {
first: Paragraph;
}

export interface Field {
}

export interface Fields extends Collection<Field> {
toggleShowCodes(): void;
}

export interface Hyperlink {
address: string;
textToDisplay: string;
range: Range;
}

export interface Hyperlinks extends Collection<Hyperlink> {
}

export interface Document {
fields: Fields;
paragraphs: Paragraphs;
hyperlinks: Hyperlinks;
builtInDocumentProperties: Collection<any>;
close(saveChanges: boolean): void;
range(): Range;
}

export interface Documents extends Collection<Document> {
open(filename: string): Document;
}

export interface Application {
documents: Documents;
quit(): void;
}
}
@@ -7,145 +7,32 @@
// as a command line argument and the resulting Markdown is written to standard output. The
// tool recognizes the specific Word styles used in the TypeScript Language Specification.

namespace Word {
export interface Collection<T> {
count: number;
item(index: number): T;
}

export interface Font {
bold: boolean;
italic: boolean;
subscript: boolean;
superscript: boolean;
}

export interface Find {
font: Font;
format: boolean;
replacement: Replacement;
style: any;
text: string;
clearFormatting(): void;
execute(
findText: string,
matchCase: boolean,
matchWholeWord: boolean,
matchWildcards: boolean,
matchSoundsLike: boolean,
matchAllWordForms: boolean,
forward: boolean,
wrap: number,
format: boolean,
replaceWith: string,
replace: number): boolean;
}

export interface Replacement {
font: Font;
style: any;
text: string;
clearFormatting(): void;
}

export interface ListFormat {
listLevelNumber: number;
listString: string;
}

export interface Column {
}

export interface Columns extends Collection<Column> {
}

export interface Table {
columns: Columns;
}

export interface Tables extends Collection<Table> {
}

export interface Range {
find: Find;
listFormat: ListFormat;
tables: Tables;
text: string;
textRetrievalMode: {
includeHiddenText: boolean;
}
words: Ranges;
}

export interface Ranges extends Collection<Range> {
}

export interface Style {
nameLocal: string;
}

export interface Paragraph {
alignment: number;
range: Range;
style: Style;
next(): Paragraph;
}

export interface Paragraphs extends Collection<Paragraph> {
first: Paragraph;
}

export interface Field {
}

export interface Fields extends Collection<Field> {
toggleShowCodes(): void;
}

export interface Hyperlink {
address: string;
textToDisplay: string;
range: Range;
}

export interface Hyperlinks extends Collection<Hyperlink> {
}

export interface Document {
fields: Fields;
paragraphs: Paragraphs;
hyperlinks: Hyperlinks;
builtInDocumentProperties: Collection<any>;
close(saveChanges: boolean): void;
range(): Range;
}

export interface Documents extends Collection<Document> {
open(filename: string): Document;
}

export interface Application {
documents: Documents;
quit(): void;
}
}
/// <reference lib="scripthost" />
// eslint-disable-next-line @typescript-eslint/triple-slash-reference
/// <reference path="./word.d.ts" />

/** @type {{
args: string[];
createObject: (typeName: string) => any;
write(s: string): void;
writeFile: (fileName: string, data: string) => void;
}} */
const sys = (() => {
const fileStream = new ActiveXObject("ADODB.Stream");
fileStream.Type = 2 /* text */;
const binaryStream = new ActiveXObject("ADODB.Stream");
binaryStream.Type = 1 /* binary */;
const args: string[] = [];
const args = [];
for (let i = 0; i < WScript.Arguments.length; i++) {
args[i] = WScript.Arguments.Item(i);
}
return {
args,
createObject: (typeName: string) => new ActiveXObject(typeName),
write(s: string): void {
createObject: (typeName) => new ActiveXObject(typeName),
write(s) {
WScript.StdOut.Write(s);
},
writeFile: (fileName: string, data: string): void => {
writeFile: (fileName, data) => {
fileStream.Open();
binaryStream.Open();
try {

@@ -165,25 +52,37 @@ const sys = (() => {
};
})();

interface FindReplaceOptions {
/** @typedef {{
style?: any;
font?: {
bold?: boolean;
italic?: boolean;
subscript?: boolean;
};
}
}} FindReplaceOptions */

function convertDocumentToMarkdown(doc: Word.Document): string {

const columnAlignment: number[] = [];
let tableColumnCount: number;
let tableCellIndex: number;
let lastInTable: boolean;
let lastStyle: string;
/**
* @param {Word.Document} doc
* @returns {string}
*/
function convertDocumentToMarkdown(doc) {
/** @type {number[]} */
const columnAlignment = [];
/** @type {number} */
let tableColumnCount;
/** @type {number} */
let tableCellIndex;
/** @type {boolean} */
let lastInTable;
/** @type {string} */
let lastStyle;
let result = "";

function setProperties(target: any, properties: any) {
/**
* @param {any} target
* @param {any} properties
*/
function setProperties(target, properties) {
for (const name in properties) {
if (Object.prototype.hasOwnProperty.call(properties, name)) {
const value = properties[name];

@@ -197,7 +96,13 @@ function convertDocumentToMarkdown(doc: Word.Document): string {
}
}

function findReplace(findText: string, findOptions: FindReplaceOptions, replaceText: string, replaceOptions: FindReplaceOptions) {
/**
* @param {string} findText
* @param {FindReplaceOptions} findOptions
* @param {string} replaceText
* @param {FindReplaceOptions} replaceOptions
*/
function findReplace(findText, findOptions, replaceText, replaceOptions) {
const find = doc.range().find;
find.clearFormatting();
setProperties(find, findOptions);

@@ -230,7 +135,10 @@ function convertDocumentToMarkdown(doc: Word.Document): string {
}
}

function write(s: string) {
/**
* @param {string} s
*/
function write(s) {
result += s;
}

@@ -250,7 +158,10 @@ function convertDocumentToMarkdown(doc: Word.Document): string {
write("|\n");
}

function trimEndFormattingMarks(text: string) {
/**
* @param {string} text
*/
function trimEndFormattingMarks(text) {
let i = text.length;
while (i > 0 && text.charCodeAt(i - 1) < 0x20) i--;
return text.substr(0, i);

@@ -269,7 +180,10 @@ function convertDocumentToMarkdown(doc: Word.Document): string {
}
}

function writeParagraph(p: Word.Paragraph) {
/**
* @param {Word.Paragraph} p
*/
function writeParagraph(p) {

const range = p.range;
const inTable = range.tables.count > 0;

@@ -408,12 +322,17 @@ function convertDocumentToMarkdown(doc: Word.Document): string {
return result;
}

function main(args: string[]) {
/**
* @param {string[]} args
*/
function main(args) {
if (args.length !== 2) {
sys.write("Syntax: word2md <inputfile> <outputfile>\n");
return;
}
const app: Word.Application = sys.createObject("Word.Application");
/** @type {Word.Application} */
const app = sys.createObject("Word.Application");
const doc = app.documents.open(args[0]);
sys.writeFile(args[1], convertDocumentToMarkdown(doc));
doc.close(/* saveChanges */ false);
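Where the old word2md.ts relied on a TypeScript annotation to type an otherwise-any value (for example the result of sys.createObject("Word.Application")), the converted script uses a JSDoc @type comment to the same effect. A minimal sketch of that pattern with illustrative values, not code from the repo:

// Sketch only: a JSDoc @type annotation narrows an untyped initializer under checkJs.
/** @type {{ name: string }} */
const config = JSON.parse('{"name":"word2md"}'); // JSON.parse returns any; the annotation pins the shape
console.log(config.name);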
@@ -17,5 +17,5 @@ namespace Utils {
}

export const findUpRoot: { (): string; cached?: string; } = () =>
findUpRoot.cached ||= dirname(findUpFile("Gulpfile.js"));
findUpRoot.cached ||= dirname(findUpFile("Gulpfile.mjs"));
}

@@ -14,7 +14,7 @@ namespace Harness.Parallel.Host {
const { statSync } = require("fs") as typeof import("fs");

// NOTE: paths for module and types for FailedTestReporter _do not_ line up due to our use of --outFile for run.js
const FailedTestReporter = require(Utils.findUpFile("scripts/failed-tests.js")) as typeof import("../../../scripts/failed-tests");
const FailedTestReporter = require(Utils.findUpFile("scripts/failed-tests.cjs")) as typeof import("../../../scripts/failed-tests.cjs");

const perfdataFileNameFragment = ".parallelperf";
const perfData = readSavedPerfData(configOption);

@@ -534,7 +534,7 @@ namespace Harness.Parallel.Host {
patchStats(consoleReporter.stats);

let xunitReporter: import("mocha").reporters.XUnit | undefined;
let failedTestReporter: import("../../../scripts/failed-tests") | undefined;
let failedTestReporter: import("../../../scripts/failed-tests.cjs") | undefined;
if (process.env.CI === "true") {
xunitReporter = new Mocha.reporters.XUnit(replayRunner, {
reporterOptions: {