Ken Chau 2020-05-18 13:16:27 -07:00
Parent 9c9dd0b868
Commit c023e9e56f
26 changed files with 5051 additions and 103 deletions

107
.gitignore vendored

@@ -1,104 +1,5 @@
-# Logs
+node_modules
-logs
+src/**/*.js
-*.log
-npm-debug.log*
-yarn-debug.log*
-yarn-error.log*
-lerna-debug.log*
-# Diagnostic reports (https://nodejs.org/api/report.html)
-report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
-# Runtime data
-pids
-*.pid
-*.seed
-*.pid.lock
-# Directory for instrumented libs generated by jscoverage/JSCover
-lib-cov
-# Coverage directory used by tools like istanbul
-coverage
-*.lcov
-# nyc test coverage
-.nyc_output
-# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
-.grunt
-# Bower dependency directory (https://bower.io/)
-bower_components
-# node-waf configuration
-.lock-wscript
-# Compiled binary addons (https://nodejs.org/api/addons.html)
-build/Release
-# Dependency directories
-node_modules/
-jspm_packages/
-# TypeScript v1 declaration files
-typings/
-# TypeScript cache
-*.tsbuildinfo
-# Optional npm cache directory
-.npm
-# Optional eslint cache
-.eslintcache
-# Microbundle cache
-.rpt2_cache/
-.rts2_cache_cjs/
-.rts2_cache_es/
-.rts2_cache_umd/
-# Optional REPL history
-.node_repl_history
-# Output of 'npm pack'
-*.tgz
-# Yarn Integrity file
-.yarn-integrity
-# dotenv environment variables file
-.env
-.env.test
-# parcel-bundler cache (https://parceljs.org/)
-.cache
-# Next.js build output
-.next
-# Nuxt.js build / generate output
-.nuxt
-dist
+dist
+*.log
-# Gatsby files
+.DS_Store
-.cache/
-# Comment in the public line in if your project uses Gatsby and *not* Next.js
-# https://nextjs.org/blog/next-9-1#public-directory-support
-# public
-# vuepress build output
-.vuepress/dist
-# Serverless directories
-.serverless/
-# FuseBox cache
-.fusebox/
-# DynamoDB Local files
-.dynamodb/
-# TernJS port file
-.tern-port

2
.npmignore Normal file

@@ -0,0 +1,2 @@
src
change

335
CHANGELOG.json Normal file

@@ -0,0 +1,335 @@
{
"name": "lage",
"entries": [
{
"date": "Mon, 18 May 2020 16:25:15 GMT",
"tag": "lage_v0.4.7",
"version": "0.4.7",
"comments": {
"patch": [
{
"comment": "update backfill to latest",
"author": "kchau@microsoft.com",
"commit": "74758ecbd8c72565e10f8c41684719fd09f762c4",
"package": "lage"
}
]
}
},
{
"date": "Mon, 18 May 2020 16:25:09 GMT",
"tag": "lage_v0.4.7",
"version": "0.4.7",
"comments": {
"patch": [
{
"comment": "update backfill to latest",
"author": "kchau@microsoft.com",
"commit": "74758ecbd8c72565e10f8c41684719fd09f762c4",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 23:30:10 GMT",
"tag": "lage_v0.4.6",
"version": "0.4.6",
"comments": {
"patch": [
{
"comment": "fixing up npm cmd call so it's platform independent",
"author": "kchau@microsoft.com",
"commit": "4d7cdb84d605e8a898efea6f263786b4fd20dec9",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 23:30:04 GMT",
"tag": "lage_v0.4.6",
"version": "0.4.6",
"comments": {
"patch": [
{
"comment": "fixing up npm cmd call so it's platform independent",
"author": "kchau@microsoft.com",
"commit": "4d7cdb84d605e8a898efea6f263786b4fd20dec9",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 22:42:02 GMT",
"tag": "lage_v0.4.5",
"version": "0.4.5",
"comments": {
"patch": [
{
"comment": "more event listeners",
"author": "kchau@microsoft.com",
"commit": "3a4d16d7e49f63ec7304e5520ff2d42082c7d1a0",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 22:41:56 GMT",
"tag": "lage_v0.4.5",
"version": "0.4.5",
"comments": {
"patch": [
{
"comment": "more event listeners",
"author": "kchau@microsoft.com",
"commit": "3a4d16d7e49f63ec7304e5520ff2d42082c7d1a0",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 18:47:23 GMT",
"tag": "lage_v0.4.4",
"version": "0.4.4",
"comments": {
"patch": [
{
"comment": "allow tasks with no deps to run as well",
"author": "kchau@microsoft.com",
"commit": "446a56a95b97f048967565eb19f093d09fb15cd1",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 18:47:15 GMT",
"tag": "lage_v0.4.4",
"version": "0.4.4",
"comments": {
"patch": [
{
"comment": "allow tasks with no deps to run as well",
"author": "kchau@microsoft.com",
"commit": "446a56a95b97f048967565eb19f093d09fb15cd1",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 16:37:33 GMT",
"tag": "lage_v0.4.3",
"version": "0.4.3",
"comments": {
"patch": [
{
"comment": "truly respects the profile flag",
"author": "kchau@microsoft.com",
"commit": "2b8ff2e0edd4c41bf42e529a6935bd3798ff68ab",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 16:37:26 GMT",
"tag": "lage_v0.4.3",
"version": "0.4.3",
"comments": {
"patch": [
{
"comment": "truly respects the profile flag",
"author": "kchau@microsoft.com",
"commit": "2b8ff2e0edd4c41bf42e529a6935bd3798ff68ab",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 16:28:31 GMT",
"tag": "lage_v0.4.2",
"version": "0.4.2",
"comments": {
"patch": [
{
"comment": "added option to do profiling or not",
"author": "kchau@microsoft.com",
"commit": "c97bbc12ec78fd771555e8d5ca8e724a81fb40ec",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 16:28:24 GMT",
"tag": "lage_v0.4.2",
"version": "0.4.2",
"comments": {
"patch": [
{
"comment": "added option to do profiling or not",
"author": "kchau@microsoft.com",
"commit": "c97bbc12ec78fd771555e8d5ca8e724a81fb40ec",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:58:36 GMT",
"tag": "lage_v0.4.1",
"version": "0.4.1",
"comments": {
"patch": [
{
"comment": "added usage stuff",
"author": "kchau@microsoft.com",
"commit": "f3a38f646f77488406dacc4d65f6097a1608de23",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:58:30 GMT",
"tag": "lage_v0.4.1",
"version": "0.4.1",
"comments": {
"patch": [
{
"comment": "added usage stuff",
"author": "kchau@microsoft.com",
"commit": "f3a38f646f77488406dacc4d65f6097a1608de23",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:55:26 GMT",
"tag": "lage_v0.4.0",
"version": "0.4.0",
"comments": {
"minor": [
{
"comment": "better logging",
"author": "kchau@microsoft.com",
"commit": "11d6d5576e2f79c1fa9d45cfee45eb48132ab835",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:55:19 GMT",
"tag": "lage_v0.4.0",
"version": "0.4.0",
"comments": {
"minor": [
{
"comment": "better logging",
"author": "kchau@microsoft.com",
"commit": "11d6d5576e2f79c1fa9d45cfee45eb48132ab835",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:33:46 GMT",
"tag": "lage_v0.3.0",
"version": "0.3.0",
"comments": {
"minor": [
{
"comment": "adding verbose logging",
"author": "kchau@microsoft.com",
"commit": "d982c9c511420c1a936deeedbb1db6a6d6fe4f51",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:33:40 GMT",
"tag": "lage_v0.3.0",
"version": "0.3.0",
"comments": {
"minor": [
{
"comment": "adding verbose logging",
"author": "kchau@microsoft.com",
"commit": "d982c9c511420c1a936deeedbb1db6a6d6fe4f51",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:23:45 GMT",
"tag": "lage_v0.2.1",
"version": "0.2.1",
"comments": {
"patch": [
{
"comment": "fixes the binary script",
"author": "kchau@microsoft.com",
"commit": "a70613e7fe0f2e901d9af7ed6ac597898f1c451f",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:23:37 GMT",
"tag": "lage_v0.2.1",
"version": "0.2.1",
"comments": {
"patch": [
{
"comment": "fixes the binary script",
"author": "kchau@microsoft.com",
"commit": "a70613e7fe0f2e901d9af7ed6ac597898f1c451f",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:14:27 GMT",
"tag": "lage_v0.2.0",
"version": "0.2.0",
"comments": {
"minor": [
{
"comment": "updated with latest and greatest backfill",
"author": "kchau@microsoft.com",
"commit": "bc784ccaeaf9e61ed9adb9ef268c899f2ea48440",
"package": "lage"
}
]
}
},
{
"date": "Wed, 13 May 2020 01:14:21 GMT",
"tag": "lage_v0.2.0",
"version": "0.2.0",
"comments": {
"minor": [
{
"comment": "updated with latest and greatest backfill",
"author": "kchau@microsoft.com",
"commit": "bc784ccaeaf9e61ed9adb9ef268c899f2ea48440",
"package": "lage"
}
]
}
}
]
}

93
CHANGELOG.md Normal file

@@ -0,0 +1,93 @@
# Change Log - lage
This log was last generated on Mon, 18 May 2020 16:25:15 GMT and should not be manually modified.
<!-- Start content -->
## 0.4.7
Mon, 18 May 2020 16:25:15 GMT
### Patches
- update backfill to latest (kchau@microsoft.com)
## 0.4.6
Wed, 13 May 2020 23:30:10 GMT
### Patches
- fixing up npm cmd call so it's platform independent (kchau@microsoft.com)
## 0.4.5
Wed, 13 May 2020 22:42:02 GMT
### Patches
- more event listeners (kchau@microsoft.com)
## 0.4.4
Wed, 13 May 2020 18:47:23 GMT
### Patches
- allow tasks with no deps to run as well (kchau@microsoft.com)
## 0.4.3
Wed, 13 May 2020 16:37:33 GMT
### Patches
- truly respects the profile flag (kchau@microsoft.com)
## 0.4.2
Wed, 13 May 2020 16:28:31 GMT
### Patches
- added option to do profiling or not (kchau@microsoft.com)
## 0.4.1
Wed, 13 May 2020 01:58:36 GMT
### Patches
- added usage stuff (kchau@microsoft.com)
## 0.4.0
Wed, 13 May 2020 01:55:26 GMT
### Minor changes
- better logging (kchau@microsoft.com)
## 0.3.0
Wed, 13 May 2020 01:33:46 GMT
### Minor changes
- adding verbose logging (kchau@microsoft.com)
## 0.2.1
Wed, 13 May 2020 01:23:45 GMT
### Patches
- fixes the binary script (kchau@microsoft.com)
## 0.2.0
Wed, 13 May 2020 01:14:27 GMT
### Minor changes
- updated with latest and greatest backfill (kchau@microsoft.com)

3
bin/lage.js Normal file

@@ -0,0 +1,3 @@
#!/usr/bin/env node
require("../dist/index.js");

41
package.json Normal file

@@ -0,0 +1,41 @@
{
"name": "lage",
"description": "A monorepo task runner",
"version": "0.4.7",
"license": "MIT",
"main": "dist/index.js",
"bin": {
"lage": "bin/lage.js"
},
"scripts": {
"build": "tsc",
"start": "tsc -w --preserveWatchOutput",
"change": "beachball change",
"release": "yarn build && beachball publish"
},
"dependencies": {
"@lerna/profiler": "^3.20.0",
"chalk": "^4.0.0",
"backfill": "5.0.0-alpha.0",
"cosmiconfig": "^6.0.0",
"git-url-parse": "^11.1.2",
"npmlog": "^4.1.2",
"p-graph": "^0.4.0",
"p-queue": "^6.4.0",
"yargs-parser": "^18.1.3",
"workspace-tools": "^0.4.0"
},
"devDependencies": {
"@types/chalk": "^2.2.0",
"@types/cosmiconfig": "^6.0.0",
"@types/git-url-parse": "^9.0.0",
"@types/jju": "^1.4.1",
"@types/minimatch": "^3.0.3",
"@types/npmlog": "^4.1.2",
"@types/node": "^13.13.2",
"@types/p-queue": "^3.2.1",
"@types/yargs-parser": "^15.0.0",
"beachball": "^1.31.0",
"typescript": "^3.8.3"
}
}

46
src/cache/backfill.ts vendored Normal file

@@ -0,0 +1,46 @@
import * as backfill from "backfill/lib/api";
import { PackageInfo } from "workspace-tools";
import path from "path";
import { RunContext } from "../types/RunContext";
const hashes: { [key: string]: string } = {};
const cacheHits: { [key: string]: boolean } = {};
export async function computeHash(info: PackageInfo, context: RunContext) {
const logger = backfill.makeLogger("warn", process.stdout, process.stderr);
const name = info.name;
logger.setName(name);
const hash = await backfill.computeHash(
path.dirname(info.packageJsonPath),
logger,
context.command + context.args.join(" ")
);
hashes[info.name] = hash;
}
export async function fetchBackfill(info: PackageInfo) {
const logger = backfill.makeLogger("warn", process.stdout, process.stderr);
const hash = hashes[info.name];
const cwd = path.dirname(info.packageJsonPath);
const cacheHit = await backfill.fetch(cwd, hash, logger);
cacheHits[info.name] = cacheHit;
}
export async function putBackfill(info: PackageInfo) {
const logger = backfill.makeLogger("warn", process.stdout, process.stderr);
const hash = hashes[info.name];
const cwd = path.dirname(info.packageJsonPath);
try {
await backfill.put(cwd, hash, logger);
} catch (e) {
    // swallow put errors: backfill throws when the output directories do not exist yet
}
}
export { cacheHits };

43
src/cache/cacheTasks.ts vendored Normal file

@@ -0,0 +1,43 @@
import { RunContext } from "../types/RunContext";
import { getTaskId, getPackageTaskFromId } from "../task/taskId";
import { generateTask } from "../task/generateTask";
export const ComputeHashTask = "??computeHash";
export const CacheFetchTask = "??fetch";
export const CachePutTask = "??put";
export function isCacheTask(task: string) {
return (
task === ComputeHashTask || task === CacheFetchTask || task === CachePutTask
);
}
export function generateCacheTasks(context: RunContext) {
const { tasks, taskDepsGraph, cache } = context;
if (context.cache) {
for (const taskId of tasks.keys()) {
const [pkg, task] = getPackageTaskFromId(taskId);
if (
task !== CacheFetchTask &&
task !== CachePutTask &&
task !== ComputeHashTask &&
pkg
) {
const hashTaskId = getTaskId(pkg, ComputeHashTask);
const fetchTaskId = getTaskId(pkg, CacheFetchTask);
const putTaskId = getTaskId(pkg, CachePutTask);
// set up the graph
taskDepsGraph.push([hashTaskId, fetchTaskId]);
tasks.set(hashTaskId, () => generateTask(hashTaskId, context));
taskDepsGraph.push([fetchTaskId, taskId]);
tasks.set(fetchTaskId, () => generateTask(fetchTaskId, context));
taskDepsGraph.push([taskId, putTaskId]);
tasks.set(putTaskId, () => generateTask(putTaskId, context));
}
}
}
}
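For a concrete picture of the wiring above, here is a sketch (package and task names are hypothetical) of what generateCacheTasks adds around a single real task when caching is enabled:

// Hypothetical example: `tasks` already contains "foo###build".
// generateCacheTasks pushes these edges into taskDepsGraph
// (an edge [a, b] means "a must finish before b"):
//
//   ["foo###??computeHash", "foo###??fetch"]  // hash the inputs, then look up the cache
//   ["foo###??fetch",       "foo###build"]    // cache lookup runs before the real task
//   ["foo###build",         "foo###??put"]    // the real task runs before the cache upload
//
// It also registers the three synthetic task ids in `tasks`,
// each mapped to () => generateTask(taskId, context).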

376
src/git/index.ts Normal file

@@ -0,0 +1,376 @@
import { spawnSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { findGitRoot } from '../paths';
import gitUrlParse from 'git-url-parse';
/**
 * Runs a git command - use this for read-only commands
*/
export function git(args: string[], options?: { cwd: string }) {
const results = spawnSync('git', args, options);
if (results.status === 0) {
return {
stderr: results.stderr.toString().trimRight(),
stdout: results.stdout.toString().trimRight(),
success: true,
};
} else {
return {
stderr: results.stderr.toString().trimRight(),
stdout: results.stdout.toString().trimRight(),
success: false,
};
}
}
/**
 * Runs a git command - use this for commands that make changes to the file system
*/
export function gitFailFast(args: string[], options?: { cwd: string }) {
const gitResult = git(args, options);
if (!gitResult.success) {
console.error(`CRITICAL ERROR: running git command: git ${args.join(' ')}!`);
console.error(gitResult.stdout && gitResult.stdout.toString().trimRight());
console.error(gitResult.stderr && gitResult.stderr.toString().trimRight());
process.exit(1);
}
}
export function getUntrackedChanges(cwd: string) {
try {
const results = git(['status', '-z'], { cwd });
if (!results.success) {
return [];
}
const changes = results.stdout;
if (changes.length == 0) {
return [];
}
const lines = changes.split(/\0/).filter(line => line) || [];
const untracked: string[] = [];
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (line[0] === ' ' || line[0] === '?') {
untracked.push(line.substr(3));
} else if (line[0] === 'R') {
i++;
}
}
return untracked;
} catch (e) {
console.error('Cannot gather information about changes: ', e.message);
}
}
export function fetchRemote(remote: string, cwd: string) {
const results = git(['fetch', remote], { cwd });
if (!results.success) {
console.error(`Cannot fetch remote: ${remote}`);
throw new Error('Cannot fetch');
}
}
export function getChanges(branch: string, cwd: string) {
try {
const results = git(['--no-pager', 'diff', '--name-only', branch + '...'], { cwd });
if (!results.success) {
return [];
}
let changes = results.stdout;
let lines = changes.split(/\n/) || [];
return lines
.filter(line => line.trim() !== '')
.map(line => line.trim())
.filter(line => !line.includes('node_modules'));
} catch (e) {
console.error('Cannot gather information about changes: ', e.message);
}
}
export function getStagedChanges(branch: string, cwd: string) {
try {
const results = git(['--no-pager', 'diff', '--staged', '--name-only'], { cwd });
if (!results.success) {
return [];
}
let changes = results.stdout;
let lines = changes.split(/\n/) || [];
return lines
.filter(line => line.trim() !== '')
.map(line => line.trim())
.filter(line => !line.includes('node_modules'));
} catch (e) {
console.error('Cannot gather information about changes: ', e.message);
}
}
export function getRecentCommitMessages(branch: string, cwd: string) {
try {
const results = git(['log', '--decorate', '--pretty=format:%s', branch, 'HEAD'], { cwd });
if (!results.success) {
return [];
}
let changes = results.stdout;
let lines = changes.split(/\n/) || [];
return lines.map(line => line.trim());
} catch (e) {
console.error('Cannot gather information about recent commits: ', e.message);
}
}
export function getUserEmail(cwd: string) {
try {
const results = git(['config', 'user.email'], { cwd });
if (!results.success) {
return null;
}
return results.stdout;
} catch (e) {
console.error('Cannot gather information about user.email: ', e.message);
}
}
export function getBranchName(cwd: string) {
try {
const results = git(['rev-parse', '--abbrev-ref', 'HEAD'], { cwd });
if (results.success) {
return results.stdout;
}
} catch (e) {
console.error('Cannot get branch name: ', e.message);
}
return null;
}
export function getFullBranchRef(branch: string, cwd: string) {
const showRefResults = git(['show-ref', '--heads', branch], { cwd });
if (showRefResults.success) {
return showRefResults.stdout.split(' ')[1];
}
return null;
}
export function getShortBranchName(fullBranchRef: string, cwd: string) {
const showRefResults = git(['name-rev', '--name-only', fullBranchRef], { cwd });
if (showRefResults.success) {
return showRefResults.stdout;
}
return null;
}
export function getCurrentHash(cwd: string) {
try {
const results = git(['rev-parse', 'HEAD'], { cwd });
if (results.success) {
return results.stdout;
}
} catch (e) {
console.error('Cannot get current git hash');
}
return null;
}
/**
* Get the commit hash in which the file was first added.
*/
export function getFileAddedHash(filename: string, cwd: string) {
const results = git(['rev-list', 'HEAD', filename], { cwd });
if (results.success) {
return results.stdout
.trim()
.split('\n')
.slice(-1)[0];
}
return undefined;
}
export function stageAndCommit(patterns: string[], message: string, cwd: string) {
try {
patterns.forEach(pattern => {
git(['add', pattern], { cwd });
});
const commitResults = git(['commit', '-m', message], { cwd });
if (!commitResults.success) {
console.error('Cannot commit changes');
console.log(commitResults.stdout);
console.error(commitResults.stderr);
}
} catch (e) {
console.error('Cannot stage and commit changes', e.message);
}
}
export function revertLocalChanges(cwd: string) {
const stash = `beachball_${new Date().getTime()}`;
git(['stash', 'push', '-u', '-m', stash], { cwd });
const results = git(['stash', 'list']);
if (results.success) {
const lines = results.stdout.split(/\n/);
const foundLine = lines.find(line => line.includes(stash));
if (foundLine) {
const matched = foundLine.match(/^[^:]+/);
if (matched) {
git(['stash', 'drop', matched[0]]);
return true;
}
}
}
return false;
}
export function getParentBranch(cwd: string) {
const branchName = getBranchName(cwd);
if (!branchName || branchName === 'HEAD') {
return null;
}
const showBranchResult = git(['show-branch', '-a'], { cwd });
if (showBranchResult.success) {
const showBranchLines = showBranchResult.stdout.split(/\n/);
const parentLine = showBranchLines.find(
line => line.indexOf('*') > -1 && line.indexOf(branchName) < 0 && line.indexOf('publish_') < 0
);
if (!parentLine) {
return null;
}
const matched = parentLine.match(/\[(.*)\]/);
if (!matched) {
return null;
}
return matched[1];
}
return null;
}
export function getRemoteBranch(branch: string, cwd: string) {
const results = git(['rev-parse', '--abbrev-ref', '--symbolic-full-name', `${branch}@\{u\}`], { cwd });
if (results.success) {
return results.stdout.trim();
}
return null;
}
export function parseRemoteBranch(branch: string) {
const firstSlashPos = branch.indexOf('/', 0);
const remote = branch.substring(0, firstSlashPos);
const remoteBranch = branch.substring(firstSlashPos + 1);
return {
remote,
remoteBranch,
};
}
function normalizeRepoUrl(repositoryUrl: string) {
try {
const parsed = gitUrlParse(repositoryUrl);
return parsed
.toString('https')
.replace(/\.git$/, '')
.toLowerCase();
} catch (e) {
return '';
}
}
export function getDefaultRemoteBranch(branch: string = 'master', cwd: string) {
const defaultRemote = getDefaultRemote(cwd);
return `${defaultRemote}/${branch}`;
}
export function getDefaultRemote(cwd: string) {
let packageJson: any;
try {
packageJson = JSON.parse(fs.readFileSync(path.join(findGitRoot(cwd)!, 'package.json')).toString());
} catch (e) {
console.log('failed to read package.json');
throw new Error('invalid package.json detected');
}
const { repository } = packageJson;
let repositoryUrl = '';
if (typeof repository === 'string') {
repositoryUrl = repository;
} else if (repository && repository.url) {
repositoryUrl = repository.url;
}
const normalizedUrl = normalizeRepoUrl(repositoryUrl);
const remotesResult = git(['remote', '-v'], { cwd });
if (remotesResult.success) {
const allRemotes: { [url: string]: string } = {};
remotesResult.stdout.split('\n').forEach(line => {
const parts = line.split(/\s+/);
allRemotes[normalizeRepoUrl(parts[1])] = parts[0];
});
if (Object.keys(allRemotes).length > 0) {
const remote = allRemotes[normalizedUrl];
if (remote) {
return remote;
}
}
}
console.log(`Defaults to "origin"`);
return 'origin';
}
export function listAllTrackedFiles(patterns: string[], cwd: string) {
if (patterns) {
const results = git(['ls-files', ...patterns], { cwd });
if (results.success) {
return results.stdout.split(/\n/);
}
}
return [];
}
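A minimal usage sketch of these helpers, assuming they are imported from another file under src/ and that the process runs inside a git repository:

import { getDefaultRemoteBranch, getChanges, getCurrentHash } from "./git";

const cwd = process.cwd();
const target = getDefaultRemoteBranch("master", cwd); // e.g. "origin/master"
const changed = getChanges(target, cwd) || [];        // files changed vs. the target branch
console.log(`HEAD is ${getCurrentHash(cwd)}, ${changed.length} file(s) changed`);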

136
src/index.ts Normal file

@@ -0,0 +1,136 @@
import { cosmiconfigSync } from "cosmiconfig";
import { discoverTaskDeps } from "./task/discoverTaskDeps";
import { EventEmitter } from "events";
import { getPackageInfos, findGitRoot } from "workspace-tools";
import { initialize } from "./logger";
import { RunContext } from "./types/RunContext";
import { runTasks } from "./task/taskRunner";
import log from "npmlog";
import os from "os";
import PQueue from "p-queue/dist";
import Profiler from "@lerna/profiler";
import yargsParser from "yargs-parser";
const parsedArgs = yargsParser(process.argv.slice(2));
const root = findGitRoot(process.cwd());
if (!root) {
throw new Error("This must be called inside a git-controlled repo");
}
const ConfigModuleName = "lage";
const configResults = cosmiconfigSync(ConfigModuleName).search(
root || process.cwd()
);
const concurrency = os.cpus().length - 1;
const command = parsedArgs._[0];
const events = new EventEmitter();
const context: RunContext = {
allPackages: getPackageInfos(root),
command,
concurrency,
defaultPipeline: configResults?.config.pipeline || {
build: ["^build"],
clean: [],
},
taskDepsGraph: [],
tasks: new Map(),
deps: parsedArgs.deps || configResults?.config.deps || false,
scope: parsedArgs.scope || configResults?.config.scope || [],
measures: {
start: [0, 0],
duration: [0, 0],
taskStats: [],
failedTask: undefined,
},
profiler: new Profiler({
concurrency,
outputDirectory: process.cwd(),
}),
taskLogs: new Map(),
queue: new PQueue({ concurrency }),
cache: parsedArgs.cache === false ? false : true,
nodeArgs: parsedArgs.nodeArgs ? arrifyArgs(parsedArgs.nodeArgs) : [],
args: getPassThroughArgs(parsedArgs),
events,
verbose: parsedArgs.verbose,
profile: parsedArgs.profile,
};
initialize(context);
if (context.verbose) {
log.level = "verbose";
}
console.log(`🧱 Lage task runner 🧱`);
console.log(``);
validateInput(context);
discoverTaskDeps(context);
events.setMaxListeners(context.tasks.size);
(async () => {
await runTasks(context);
})();
function arrifyArgs(args: { [key: string]: string | string[] }) {
const argsArray: string[] = [];
for (const [key, val] of Object.entries(args)) {
if (Array.isArray(val)) {
for (const item of val) {
pushValue(key, item);
}
} else {
pushValue(key, val);
}
}
return argsArray;
function pushValue(key: string, value: string) {
let keyArg = "";
if (typeof value === "boolean") {
if (key.length === 1 && value) {
keyArg = `-${key}`;
} else if (value) {
keyArg = `--${key}`;
} else {
keyArg = `--no-${key}`;
}
argsArray.push(keyArg);
} else {
      keyArg = key.length === 1 ? `-${key}` : `--${key}`;
      argsArray.push(keyArg, value);
}
}
}
function getPassThroughArgs(args: { [key: string]: string | string[] }) {
let result: string[] = [];
result = result.concat(args._.slice(1));
let {
nodeArgs: _nodeArgValues,
scope: _scopeArg,
deps: _depsArg,
cache: _cacheArg,
_: _positionals,
...filtered
} = args;
result = result.concat(arrifyArgs(filtered));
return result;
}
function validateInput(context: RunContext) {
if (parsedArgs._.length < 1) {
console.log("Usage: lage [command]");
process.exit(0);
}
}
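Because the configuration is looked up with cosmiconfigSync("lage"), the pipeline/scope/deps defaults can live in a config file at the repo root. A minimal sketch, assuming a lage.config.js file (any search place cosmiconfig supports for the "lage" module name would work):

// lage.config.js (hypothetical example)
module.exports = {
  pipeline: {
    build: ["^build"], // build depends on the builds of in-repo dependencies
    test: ["build"],   // test depends on this package's own build
    clean: [],         // no dependencies
  },
  // Optional defaults that the --scope and --deps CLI flags override:
  // scope: [],
  // deps: false,
};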

11
src/logger/formatDuration.ts Normal file

@@ -0,0 +1,11 @@
export function formatDuration(hrtime: [number, number]) {
let raw = hrtime[0] + hrtime[1] / 1e9;
if (raw > 60) {
const minutes = Math.floor(raw / 60);
const seconds = (raw - minutes * 60).toFixed(2);
return `${minutes}m ${seconds}s`;
} else {
const seconds = raw.toFixed(2);
return `${seconds}s`;
}
}
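Two worked examples of the formatting above (process.hrtime tuples are [seconds, nanoseconds]):

formatDuration([4, 250000000]);  // "4.25s"
formatDuration([75, 300000000]); // "1m 15.30s" (75.3s = 1 minute + 15.30 seconds)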

79
src/logger/index.ts Normal file

@@ -0,0 +1,79 @@
import log from "npmlog";
import { getPackageTaskFromId } from "../task/taskId";
import { RunContext } from "../types/RunContext";
import { Writable } from "stream";
import chalk from "chalk";
let _context: RunContext;
export function initialize(context: RunContext) {
_context = context;
}
export function getTaskLogPrefix(taskId: string) {
const [pkg, task] = getPackageTaskFromId(taskId);
return `${pkg} ${chalk.green(task)}`;
}
function addToTaskLog(taskId: string, message: string) {
const { taskLogs } = _context;
if (!taskLogs.has(taskId)) {
taskLogs.set(taskId, []);
}
taskLogs.get(taskId)?.push(message);
}
export function info(taskId: string, message: string, ...args: any) {
addToTaskLog(taskId, message);
return log.info(getTaskLogPrefix(taskId), chalk.cyan(message), ...args);
}
export function warn(taskId: string, message: string, ...args: any) {
addToTaskLog(taskId, message);
  return log.warn(getTaskLogPrefix(taskId), chalk.yellow(message), ...args);
}
export function error(taskId: string, message: string, ...args: any) {
addToTaskLog(taskId, message);
return log.error(getTaskLogPrefix(taskId), chalk.red(message), ...args);
}
export function verbose(taskId: string, message: string, ...args: any) {
addToTaskLog(taskId, message);
return log.verbose(
getTaskLogPrefix(taskId),
chalk.underline(message),
...args
);
}
export class NpmLogWritable extends Writable {
private buffer: string = "";
constructor(private taskId: string) {
super();
}
_write(
chunk: Buffer,
encoding: string,
callback: (error?: Error | null) => void
) {
let prev = 0;
let curr = 0;
while (curr < chunk.byteLength) {
if (chunk[curr] === 13 || (chunk[curr] === 10 && curr - prev > 1)) {
this.buffer = this.buffer + chunk.slice(prev, curr).toString().trim();
addToTaskLog(this.taskId, this.buffer);
log.verbose(getTaskLogPrefix(this.taskId), this.buffer);
this.buffer = "";
prev = curr;
}
curr++;
}
callback();
}
}
export default { info, warn, error, verbose };

53
src/logger/reportSummary.ts Normal file

@@ -0,0 +1,53 @@
import { RunContext } from "../types/RunContext";
import { getPackageTaskFromId } from "../task/taskId";
import log from "npmlog";
import chalk from "chalk";
import { formatDuration } from "./formatDuration";
import { info } from "./index";
function hr() {
log.info("", "----------------------------------------------");
}
export async function reportSummary(context: RunContext) {
const { command, measures, taskLogs } = context;
const statusColorFn = {
success: chalk.greenBright,
failed: chalk.redBright,
skipped: chalk.gray,
};
hr();
log.info("", chalk.cyanBright(`🏗 Summary\n`));
if (measures.failedTask) {
const [pkg, task] = getPackageTaskFromId(measures.failedTask);
log.error("", `ERROR DETECTED IN ${pkg} ${task}`);
log.error("", taskLogs.get(measures.failedTask)!.join("\n"));
hr();
}
if (measures.taskStats.length > 0) {
for (const stats of measures.taskStats) {
const colorFn = statusColorFn[stats.status];
info(
stats.taskId,
colorFn(`${stats.status}, took ${formatDuration(stats.duration)}`)
);
}
} else {
log.warn("", "Nothing has been run. Check the scope or the command name");
}
hr();
log.info(
"",
`The command "${command}" took a total of ${formatDuration(
measures.duration
)} to complete`
);
}

51
src/paths.ts Normal file

@@ -0,0 +1,51 @@
import path from 'path';
import fs from 'fs';
/**
* Starting from `cwd`, searches up the directory hierarchy for `pathName`
* @param pathName
* @param cwd
*/
export function searchUp(pathName: string, cwd: string) {
const root = path.parse(cwd).root;
let found = false;
while (!found && cwd !== root) {
if (fs.existsSync(path.join(cwd, pathName))) {
found = true;
break;
}
cwd = path.dirname(cwd);
}
if (found) {
return cwd;
}
return null;
}
export function findGitRoot(cwd: string) {
return searchUp('.git', cwd);
}
export function findPackageRoot(cwd: string) {
return searchUp('package.json', cwd);
}
export function getChangePath(cwd: string) {
const gitRoot = findGitRoot(cwd);
if (gitRoot) {
return path.join(gitRoot, 'change');
}
return null;
}
export function isChildOf(child: string, parent: string) {
const relativePath = path.relative(child, parent);
return /^[.\/\\]+$/.test(relativePath);
}

5
src/task/abortSignal.ts Normal file

@@ -0,0 +1,5 @@
import { RunContext } from "../types/RunContext";
export function abort(context: RunContext) {
context.events.emit("abort");
}

169
src/task/discoverTaskDeps.ts Normal file

@@ -0,0 +1,169 @@
import {
getScopedPackages,
getTransitiveDependencies,
PackageInfos,
getDependentMap,
} from "workspace-tools";
import { getTaskId, getPackageTaskFromId } from "./taskId";
import { RunContext } from "../types/RunContext";
import { TaskId } from "../types/Task";
import { generateTask } from "./generateTask";
import path from "path";
import {
ComputeHashTask,
CachePutTask,
CacheFetchTask,
} from "../cache/cacheTasks";
import { cosmiconfigSync } from "cosmiconfig";
const ConfigModuleName = "lage";
function filterPackages(context: RunContext) {
const { allPackages, scope, deps: withDeps } = context;
let scopes = ([] as string[]).concat(scope);
let scopedPackages =
scopes && scopes.length > 0
? getScopedPackages(scopes, allPackages)
: Object.keys(allPackages);
if (withDeps) {
scopedPackages = scopedPackages.concat(
getTransitiveDependencies(scopedPackages, allPackages)
);
}
return scopedPackages;
}
function getPipeline(pkg: string, context: RunContext) {
const { allPackages, defaultPipeline } = context;
const info = allPackages[pkg];
const results = cosmiconfigSync(ConfigModuleName).search(
path.dirname(info.packageJsonPath)
);
let pipeline = defaultPipeline;
if (results && results.config) {
pipeline = results.config.pipeline;
}
return pipeline;
}
/**
 * Gathers all the task dependencies defined by the "pipeline" setting and generates a list of edges
* @param targetTask
* @param pipeline
*/
function generateTaskDepsGraph(
targetTask: string,
pipeline: { [key: string]: string[] }
) {
const queue = [targetTask];
const visited = new Set<string>();
const graph: [string, string][] = [];
while (queue.length > 0) {
const task = queue.shift()!;
if (!visited.has(task)) {
visited.add(task);
if (Array.isArray(pipeline[task])) {
if (pipeline[task].length > 0) {
for (const depTask of pipeline[task]) {
graph.push([depTask, task]);
queue.push(depTask);
}
} else {
graph.push(["", task]);
}
}
}
}
return graph;
}
/**
 * Identifies and creates the realized task dependency map (discovery)
 *
 * This function traverses the package dependency graph and then the task dependencies within each package (a two-layered traversal)
*/
export function discoverTaskDeps(context: RunContext) {
const { allPackages, command } = context;
const filteredPackages = filterPackages(context);
// initialize a queue for a breadth first approach
const traversalQueue = filteredPackages;
const visited = new Set<string>();
const dependentMap = getDependentMap(allPackages);
while (traversalQueue.length > 0) {
const pkg = traversalQueue.shift()!;
if (!visited.has(pkg)) {
visited.add(pkg);
// get pipeline
const pipeline = getPipeline(pkg, context);
// establish task graph; push dependents in the traversal queue
const depTaskGraph = generateTaskDepsGraph(command, pipeline);
for (const [from, to] of depTaskGraph) {
const dependentPkgs = dependentMap.get(pkg);
const toTaskId = getTaskId(pkg, to);
if (from.startsWith("^") && dependentPkgs !== undefined) {
// add task dep from all the package deps within repo
for (const depPkg of dependentPkgs!) {
const fromTaskId = getTaskId(depPkg, from.slice(1));
createDep(fromTaskId, toTaskId, context);
}
// now push the dependents in the traversal queue
traversalQueue.push(pkg);
} else {
const fromTaskId = getTaskId(pkg, from);
// add task dep from same package
createDep(fromTaskId, toTaskId, context);
}
}
}
}
}
function isValidTaskId(taskId: string, allPackages: PackageInfos) {
const [pkg, task] = getPackageTaskFromId(taskId);
return (
taskId === "" ||
task === "" ||
[ComputeHashTask, CachePutTask, CacheFetchTask].includes(task) ||
Object.keys(allPackages[pkg].scripts || {}).includes(task)
);
}
function createDep(fromTaskId: TaskId, toTaskId: TaskId, context: RunContext) {
const { tasks, taskDepsGraph, allPackages } = context;
if (
!isValidTaskId(fromTaskId, allPackages) ||
!isValidTaskId(toTaskId, allPackages)
) {
return;
}
taskDepsGraph.push([fromTaskId, toTaskId]);
if (!tasks.get(fromTaskId)) {
tasks.set(fromTaskId, () => generateTask(fromTaskId, context));
}
if (!tasks.get(toTaskId)) {
tasks.set(toTaskId, () => generateTask(toTaskId, context));
}
}
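To make the edge direction concrete, here is a hypothetical pipeline and the edges generateTaskDepsGraph (a module-local helper, shown here only to illustrate its output) would produce:

const pipeline = { build: ["^build"], test: ["build"] };
generateTaskDepsGraph("test", pipeline);
// => [["build", "test"], ["^build", "build"]]
//    i.e. build runs before test; the "^" prefix marks a cross-package dependency
//    that discoverTaskDeps resolves against the other packages in the repo.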

41
src/task/generateTask.ts Normal file

@@ -0,0 +1,41 @@
import { getPackageTaskFromId } from "./taskId";
import { RunContext } from "../types/RunContext";
import {
CacheFetchTask,
CachePutTask,
ComputeHashTask,
} from "../cache/cacheTasks";
import { fetchBackfill, putBackfill, computeHash } from "../cache/backfill";
import { npmTask } from "./npmTask";
import { taskWrapper } from "./taskWrapper";
const EmptyTask = "";
/**
 * Creates a task: wraps the queueing and ultimately returns a promise for the completion of the task
* @param taskId
* @param context
*/
export function generateTask(taskId: string, context: RunContext) {
const [_, task] = getPackageTaskFromId(taskId);
  // Special case: the empty task is a dummy node that lets tasks with no dependencies be attached to the graph
if (task === EmptyTask) {
return Promise.resolve();
}
switch (task) {
case ComputeHashTask:
return taskWrapper(taskId, computeHash, context);
case CacheFetchTask:
return taskWrapper(taskId, fetchBackfill, context);
case CachePutTask:
return taskWrapper(taskId, putBackfill, context);
default:
return npmTask(taskId, context);
}
}

70
src/task/npmTask.ts Normal file

@@ -0,0 +1,70 @@
import { TaskId } from "../types/Task";
import { getPackageTaskFromId } from "./taskId";
import { spawn } from "child_process";
import path from "path";
import { RunContext } from "../types/RunContext";
import logger, { NpmLogWritable } from "../logger";
import { taskWrapper } from "./taskWrapper";
import { abort } from "./abortSignal";
import os from "os";
export function npmTask(taskId: TaskId, context: RunContext) {
const [pkg, task] = getPackageTaskFromId(taskId);
const { allPackages, queue } = context;
const npmCmd = path.join(
path.dirname(process.execPath),
os.platform() === "win32" ? "npm.cmd" : "npm"
);
const npmArgs = [...context.nodeArgs, "run", task, "--", ...context.args];
return queue.add(() =>
taskWrapper(
taskId,
() =>
new Promise((resolve, reject) => {
if (!allPackages[pkg].scripts || !allPackages[pkg].scripts![task]) {
logger.info(taskId, `Empty script detected, skipping`);
return resolve();
}
logger.verbose(taskId, `Running ${[npmCmd, ...npmArgs].join(" ")}`);
const cp = spawn(npmCmd, npmArgs, {
cwd: path.dirname(allPackages[pkg].packageJsonPath),
stdio: "pipe",
});
context.events.once("abort", terminate);
const stdoutLogger = new NpmLogWritable(taskId);
cp.stdout.pipe(stdoutLogger);
const stderrLogger = new NpmLogWritable(taskId);
cp.stderr.pipe(stderrLogger);
cp.on("exit", (code) => {
        context.events.off("abort", terminate);
if (code === 0) {
return resolve();
}
context.measures.failedTask = taskId;
abort(context);
reject();
});
function terminate() {
queue.pause();
queue.clear();
cp.kill("SIGKILL");
}
}),
context
)
);
}

7
src/task/taskId.ts Normal file

@@ -0,0 +1,7 @@
export function getTaskId(pkg: string, task: string) {
return `${pkg}###${task}`;
}
export function getPackageTaskFromId(id: string) {
return id.split("###");
}
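A quick round trip with a hypothetical package name, showing the id format:

const id = getTaskId("my-package", "build");  // "my-package###build"
const [pkg, task] = getPackageTaskFromId(id); // pkg === "my-package", task === "build"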

28
src/task/taskRunner.ts Normal file

@@ -0,0 +1,28 @@
import { RunContext } from "../types/RunContext";
import pGraph from "p-graph";
import { generateCacheTasks } from "../cache/cacheTasks";
import { reportSummary } from "../logger/reportSummary";
export async function runTasks(context: RunContext) {
const { command, profiler } = context;
context.measures.start = process.hrtime();
console.log(`Executing command "${command}"`);
generateCacheTasks(context);
try {
await pGraph(context.tasks, context.taskDepsGraph).run();
} catch {
// passthru - we always want to print out the summary ourselves
}
if (context.profile) {
profiler.output();
}
context.measures.duration = process.hrtime(context.measures.start);
await reportSummary(context);
}

43
src/task/taskWrapper.ts Normal file

@@ -0,0 +1,43 @@
import { TaskId } from "../types/Task";
import { PackageInfo } from "workspace-tools";
import { getPackageTaskFromId } from "./taskId";
import { RunContext } from "../types/RunContext";
import { cacheHits } from "../cache/backfill";
import { info } from "../logger";
import { isCacheTask } from "../cache/cacheTasks";
import { formatDuration } from "../logger/formatDuration";
export async function taskWrapper(
taskId: TaskId,
fn: (info: PackageInfo, context: RunContext) => void | Promise<void>,
context: RunContext
) {
const { allPackages, profiler, measures, queue } = context;
const [pkg, task] = getPackageTaskFromId(taskId);
const start = process.hrtime();
if (!cacheHits[pkg]) {
if (!isCacheTask(task)) {
info(taskId, "started");
}
try {
await profiler.run(() => fn(allPackages[pkg], context), taskId);
const duration = process.hrtime(start);
if (!isCacheTask(task)) {
measures.taskStats.push({ taskId, start, duration, status: "success" });
info(taskId, `done - took ${formatDuration(duration)}`);
}
} catch (e) {
const duration = process.hrtime(start);
measures.taskStats.push({ taskId, start, duration, status: "failed" });
throw e;
}
} else if (!isCacheTask(task)) {
const duration = process.hrtime(start);
measures.taskStats.push({ taskId, start, duration, status: "skipped" });
info(taskId, "skipped");
}
}

40
src/types/RunContext.ts Normal file

@@ -0,0 +1,40 @@
import { TaskDepsGraph, Tasks, TaskId } from "./Task";
import { PackageInfos } from "workspace-tools";
import Profiler from "@lerna/profiler";
import PQueue from "p-queue";
import { EventEmitter } from "events";
interface TaskStats {
taskId: TaskId;
start: [number, number];
duration: [number, number];
status: "failed" | "skipped" | "success" | "not started";
}
interface Measures {
start: [number, number];
duration: [number, number];
failedTask?: string;
taskStats: TaskStats[];
}
export interface RunContext {
taskDepsGraph: TaskDepsGraph;
tasks: Tasks;
allPackages: PackageInfos;
command: string;
concurrency: number;
scope: string[];
deps: boolean;
defaultPipeline: { [task: string]: string[] };
measures: Measures;
profiler: Profiler;
taskLogs: Map<TaskId, string[]>;
queue: PQueue;
cache: boolean;
nodeArgs: string[];
args: any;
events: EventEmitter;
verbose: boolean;
profile: boolean;
}

7
src/types/Task.ts Normal file

@@ -0,0 +1,7 @@
export type TaskId = string;
export type TaskDeps = TaskId[];
/** subject, dependent (e.g. [test, build]) */
export type TaskDepsGraph = [TaskId, TaskId][];
export type Tasks = Map<TaskId, (taskId: TaskId) => Promise<unknown>>;

5
src/types/ToolOptions.ts Normal file

@@ -0,0 +1,5 @@
export interface ToolOptions {
pipeline: { [task: string]: string[] };
cache: boolean;
scopes: string[];
}

70
tsconfig.json Normal file

@@ -0,0 +1,70 @@
{
"compilerOptions": {
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "es2017" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */,
"lib": [
"ES2017"
] /* Specify library files to be included in the compilation. */,
"allowJs": true /* Allow javascript files to be compiled. */,
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./dist" /* Redirect output structure to the directory. */,
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
"noImplicitAny": false /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
"allowSyntheticDefaultImports": true /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */,
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */,
"skipLibCheck": true
},
"include": ["src"]
}

3293
yarn.lock Normal file

Diff not shown because it is too large.