* allows global script cache

* sorting the cache to be consistent

* adding correct deps for lage + glob-hasher

* fixed salt test

* updating snapshot with a reasoning

* fixing the packages

* Change files

* fixing lint issues

* get rid of unused dep

* fixing depchecks

* fixing depchecker!
Kenneth Chau 2023-03-07 16:01:15 -08:00 committed by GitHub
Parent cde9ecf8fb
Commit e2eb2c00d1
No key found matching this signature
GPG key ID: 4AEE18F83AFDEB23
19 changed files with 197 additions and 95 deletions
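
The heart of this change is in the TargetHasher.ts diff below: root-level ("global") targets can now be cached, and their hash is built from glob-hasher file hashes that are sorted before being combined so the result is stable across runs. A condensed sketch of that flow, not the verbatim implementation, assuming hashGlobGit returns a { file: hash } record as it is used in the diff:

import crypto from "crypto";
import { hashGlobGit } from "glob-hasher";

// Sketch: hash a root-level target from its `inputs` globs plus the salt.
function hashRootTarget(inputs: string[], root: string, saltKey: string): string {
  // Hash every file matched by the target's `inputs` globs (values are git blob hashes).
  const fileHashes = (hashGlobGit(inputs, { cwd: root, gitignore: false }) ?? {}) as Record<string, string>;

  // Sort by file path so the combined hash does not depend on glob ordering.
  const sortedHashes = Object.keys(fileHashes)
    .sort((a, b) => a.localeCompare(b))
    .map((file) => fileHashes[file]);

  // Mix in the salt (environment-glob hash + target id + cli args + cache key).
  sortedHashes.push(saltKey);

  // Same reduction as hashStrings(): sort, feed into sha1, take the hex digest.
  const hasher = crypto.createHash("sha1");
  [...sortedHashes].sort((a, b) => a.localeCompare(b)).forEach((s) => hasher.update(s));
  return hasher.digest("hex");
}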

View file

@@ -0,0 +1,7 @@
{
"type": "minor",
"comment": "allows global script cache",
"packageName": "@lage-run/cache",
"email": "kchau@microsoft.com",
"dependentChangeType": "patch"
}

View file

@@ -0,0 +1,7 @@
{
"type": "minor",
"comment": "allows global script cache",
"packageName": "@lage-run/hasher",
"email": "kchau@microsoft.com",
"dependentChangeType": "patch"
}

View file

@@ -0,0 +1,7 @@
{
"type": "minor",
"comment": "allows global script cache",
"packageName": "@lage-run/target-graph",
"email": "kchau@microsoft.com",
"dependentChangeType": "patch"
}

View file

@@ -0,0 +1,7 @@
{
"type": "minor",
"comment": "allows global script cache",
"packageName": "lage",
"email": "kchau@microsoft.com",
"dependentChangeType": "patch"
}

6
packages/cache/package.json vendored
View file

@@ -21,13 +21,11 @@
"backfill-config": "^6.3.0",
"backfill-cache": "^5.6.1",
"backfill-logger": "^5.1.3",
"fast-glob": "^3.2.11"
"glob-hasher": "1.1.1"
},
"devDependencies": {
"@lage-run/monorepo-fixture": "*",
"@types/mock-fs": "4.13.1",
"monorepo-scripts": "*",
"mock-fs": "5.2.0"
"monorepo-scripts": "*"
},
"publishConfig": {
"access": "public"

27
packages/cache/src/TargetHasher.ts vendored
View file

@@ -1,6 +1,8 @@
import { Hasher as LageHasher } from "@lage-run/hasher";
import { salt } from "./salt.js";
import type { Target } from "@lage-run/target-graph";
import { hashGlobGit } from "glob-hasher";
import { hashStrings } from "./hashStrings.js";
export interface TargetHasherOptions {
root: string;
@@ -9,6 +11,15 @@ export interface TargetHasherOptions {
cliArgs?: string[];
}
function sortObject<T>(unordered: Record<string, T>): Record<string, T> {
return Object.keys(unordered)
.sort((a, b) => a.localeCompare(b))
.reduce((obj, key) => {
obj[key] = unordered[key];
return obj;
}, {});
}
/**
* TargetHasher is a class that can be used to generate a hash of a target.
*
@@ -18,12 +29,28 @@ export class TargetHasher {
constructor(private options: TargetHasherOptions) {}
async hash(target: Target): Promise<string> {
const { root } = this.options;
const hashKey = await salt(
target.environmentGlob ?? this.options.environmentGlob ?? ["lage.config.js"],
`${target.id}|${JSON.stringify(this.options.cliArgs)}`,
this.options.root,
this.options.cacheKey || ""
);
if (target.cwd === root && target.cache) {
if (!target.inputs) {
throw new Error("Root-level targets must have `inputs` defined if it has cache enabled.");
}
const hashes = hashGlobGit(target.inputs, { cwd: root, gitignore: false }) ?? {};
const sortedHashMap = sortObject(hashes);
const sortedHashes = Object.values(sortedHashMap);
sortedHashes.push(hashKey);
return hashStrings(sortedHashes);
}
const hasher = new LageHasher(target.cwd);
return hasher.createPackageHash(hashKey);
}
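
In terms of the public surface, the contract for root-level targets is now: cwd === root plus cache enabled routes hashing through glob-hasher, and `inputs` becomes mandatory. An illustrative use (the import path, the target id format, and the abridged, cast Target literal are assumptions, not taken from this diff):

import { TargetHasher } from "@lage-run/cache"; // assumes the class is re-exported from the package entry point
import type { Target } from "@lage-run/target-graph";

async function hashGlobalBuild(root: string): Promise<string> {
  const hasher = new TargetHasher({ root, environmentGlob: ["lage.config.js"] });

  // Only the fields exercised by the new code path are shown; the rest of the
  // Target shape is omitted, hence the cast.
  const rootTarget = {
    id: "#build",                             // illustrative id for a global (root-level) target
    cwd: root,                                // cwd === root selects the new branch in hash()
    cache: true,
    inputs: ["scripts/**/*", "package.json"], // omitting inputs now throws
  } as unknown as Target;

  return hasher.hash(rootTarget);
}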

11
packages/cache/src/hashStrings.ts vendored Normal file
View file

@@ -0,0 +1,11 @@
import crypto from "crypto";
export function hashStrings(strings: string | string[]): string {
const hasher = crypto.createHash("sha1");
const anArray = typeof strings === "string" ? [strings] : strings;
const elements = [...anArray];
elements.sort((a, b) => a.localeCompare(b));
elements.forEach((element) => hasher.update(element));
return hasher.digest("hex");
}
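
Because hashStrings sorts its input before updating the digest, the combined hash is independent of the order in which the individual pieces were collected. A quick check (the relative import path is illustrative):

import { hashStrings } from "./hashStrings.js"; // relative to packages/cache/src, as in the diff

// Permutations of the same set produce the same digest, by design.
console.log(hashStrings(["aaa", "bbb"]) === hashStrings(["bbb", "aaa"])); // true
// A bare string is treated as a one-element array.
console.log(hashStrings("aaa") === hashStrings(["aaa"])); // true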

59
packages/cache/src/salt.ts vendored
View file

@@ -1,7 +1,5 @@
import * as path from "path";
import * as crypto from "crypto";
import * as fg from "fast-glob";
import * as fs from "fs/promises";
import { hashGlobGit } from "glob-hasher";
import { hashStrings } from "./hashStrings.js";
interface MemoizedEnvHashes {
[key: string]: string[];
@@ -25,7 +23,16 @@ function envHashKey(environmentGlobFiles: string[]) {
return environmentGlobFiles.sort().join("|");
}
async function getEnvHash(environmentGlobFiles: string[], repoRoot: string) {
function sortObject(unordered: Record<string, unknown>) {
return Object.keys(unordered)
.sort((a, b) => a.localeCompare(b))
.reduce((obj, key) => {
obj[key] = unordered[key];
return obj;
}, {});
}
async function getEnvHash(environmentGlobFiles: string[], repoRoot: string): Promise<string[]> {
const key = envHashKey(environmentGlobFiles);
// We want to make sure that we only call getEnvHashOneAtTime one at a time
@@ -42,39 +49,17 @@ async function getEnvHash(environmentGlobFiles: string[], repoRoot: string) {
return oneAtATime;
}
async function getEnvHashOneAtTime(environmentGlobFiles: string[], repoRoot: string) {
const envHash: string[] = [];
const newline = /\r\n|\r|\n/g;
const LF = "\n";
const files = fg.sync(environmentGlobFiles, {
cwd: repoRoot,
});
files.sort((a, b) => a.localeCompare(b));
for (const file of files) {
const hasher = crypto.createHash("sha1");
hasher.update(file);
const fileBuffer = await fs.readFile(path.join(repoRoot, file), "utf-8");
const data = fileBuffer.replace(newline, LF);
hasher.update(data);
envHash.push(hasher.digest("hex"));
function getEnvHashOneAtTime(environmentGlobFiles: string[], repoRoot: string) {
const key = envHashKey(environmentGlobFiles);
if (environmentGlobFiles.length === 0) {
envHashes[key] = [];
return envHashes[key];
}
const key = envHashKey(environmentGlobFiles);
envHashes[key] = envHash;
const hashes = hashGlobGit(environmentGlobFiles, { cwd: repoRoot, gitignore: false })!;
const sortedHashes = sortObject(hashes);
return envHash;
}
function hashStrings(strings: string | string[]): string {
const hasher = crypto.createHash("sha1");
const anArray = typeof strings === "string" ? [strings] : strings;
const elements = [...anArray];
elements.sort((a, b) => a.localeCompare(b));
elements.forEach((element) => hasher.update(element));
return hasher.digest("hex");
envHashes[key] = Object.values(sortedHashes);
return envHashes[key];
}
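
The memoization in salt.ts keys the cached environment hash on the sorted, "|"-joined glob list, so the same environment globs supplied in a different order share one cache entry and glob-hasher only runs once per set. Roughly:

// envHashKey as it appears in the diff above: the memo key ignores glob order
// (note that Array.prototype.sort mutates the array it is given).
function envHashKey(environmentGlobFiles: string[]): string {
  return environmentGlobFiles.sort().join("|");
}

console.log(envHashKey(["lage.config.js", ".env"]) === envHashKey([".env", "lage.config.js"])); // true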

4
packages/cache/tests/TargetHasher.test.ts vendored
View file

@@ -35,7 +35,9 @@ describe("BackfillCacheProvider", () => {
};
const hash = await new TargetHasher(options).hash(target);
await expect(hash).toMatchInlineSnapshot(`"b6ab40b8acf59d71451c845ca9ba7dd468777b26"`);
// This hash is dependent on the underlying hash algorithm. The last change here was due to us switching from sha1 to git hash.
// git hash is sha1("blob {byte count}\0{content}")
await expect(hash).toMatchInlineSnapshot(`"03577ca79ad4a10f67831e169f58f0aff9eefa74"`);
await monorepo.cleanup();
});
});

73
packages/cache/tests/salt.test.ts vendored
View file

@@ -1,30 +1,38 @@
import mockFs from "mock-fs";
import fs from "fs";
import os from "os";
import path from "path";
import { salt, _testResetEnvHash } from "../src/salt";
function mockFs(contents: Record<string, string>) {
const tmpDir = fs.mkdtempSync(os.tmpdir() + path.sep);
for (const [filename, content] of Object.entries(contents)) {
fs.writeFileSync(path.join(tmpDir, filename), content);
}
return { cwd: tmpDir, cleanup: () => fs.rmdirSync(tmpDir, { recursive: true }) };
}
describe("salt", () => {
beforeEach(() => {
_testResetEnvHash();
});
afterEach(() => {
mockFs.restore();
});
it("should generate the same salt for the same files each time even with env-hash cache reset", async () => {
const contents = {
"lage.config.js": 'module.exports = { environmentGlob: ["test.txt"] }',
"test.txt": "test text",
};
mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", process.cwd());
mockFs.restore();
const dir = mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", dir.cwd);
dir.cleanup();
_testResetEnvHash();
mockFs(contents);
const newContentsSalt = await salt(["test.txt"], "command", process.cwd());
mockFs.restore();
const dir2 = mockFs(contents);
const newContentsSalt = await salt(["test.txt"], "command", dir2.cwd);
dir2.cleanup();
expect(contentsSalt).toBe(newContentsSalt);
});
@@ -35,18 +43,19 @@ describe("salt", () => {
"test.txt": "test text",
};
mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", process.cwd());
mockFs.restore();
const dir = mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", dir.cwd);
dir.cleanup();
_testResetEnvHash();
mockFs({
const dir2 = mockFs({
...contents,
"test.txt": "test text 2",
});
const contentsSaltChanged = await salt(["test.txt"], "command", process.cwd());
mockFs.restore();
const contentsSaltChanged = await salt(["test.txt"], "command", dir2.cwd);
dir2.cleanup();
expect(contentsSalt).not.toBe(contentsSaltChanged);
});
@@ -57,15 +66,15 @@ describe("salt", () => {
"test.txt": "test text",
};
mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", process.cwd());
mockFs.restore();
const dir = mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", dir.cwd);
dir.cleanup();
_testResetEnvHash();
mockFs(contents);
const newSalt = await salt(["test.txt"], "command2", process.cwd());
mockFs.restore();
const dir2 = mockFs(contents);
const newSalt = await salt(["test.txt"], "command2", dir2.cwd);
dir2.cleanup();
expect(contentsSalt).not.toBe(newSalt);
});
@@ -76,15 +85,15 @@ describe("salt", () => {
"test.txt": "test text",
};
mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", process.cwd(), "custom1");
mockFs.restore();
const dir = mockFs(contents);
const contentsSalt = await salt(["test.txt"], "command", dir.cwd, "custom1");
dir.cleanup();
_testResetEnvHash();
mockFs(contents);
const newSalt = await salt(["test.txt"], "command", process.cwd(), "custom2");
mockFs.restore();
const dir2 = mockFs(contents);
const newSalt = await salt(["test.txt"], "command", dir2.cwd, "custom2");
dir2.cleanup();
expect(contentsSalt).not.toBe(newSalt);
});
@@ -95,9 +104,9 @@ describe("salt", () => {
"test.txt": "test text",
};
mockFs(contents);
const contentsSalt = await salt([], "command", process.cwd());
mockFs.restore();
const dir = mockFs(contents);
const contentsSalt = await salt([], "command", dir.cwd);
dir.cleanup();
expect(contentsSalt).not.toBeUndefined();
});
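
The test refactor swaps the mock-fs package for real temporary directories, presumably because glob-hasher is a native addon that reads the actual filesystem and is not intercepted by mock-fs's patched fs module. The resulting pattern in every case looks roughly like this (mockFs is the local helper defined at the top of the updated test file):

import { salt } from "../src/salt";

// Local helper from the diff above: writes real files into a temp dir.
declare function mockFs(contents: Record<string, string>): { cwd: string; cleanup(): void };

async function saltExample(): Promise<string> {
  const dir = mockFs({ "test.txt": "test text" });
  try {
    // salt against the temp dir instead of process.cwd()
    return await salt(["test.txt"], "command", dir.cwd);
  } finally {
    dir.cleanup();
  }
}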

View file

@@ -23,6 +23,7 @@
"@lage-run/cli": "^0.9.0",
"@lage-run/target-graph": "^0.6.2",
"@lage-run/scheduler-types": "^0.3.1",
"execa": "5.1.1"
"execa": "5.1.1",
"glob-hasher": "1.1.1"
}
}

View file

@@ -24,7 +24,7 @@ describe("basics", () => {
repo.cleanup();
});
it.only("basic with missing script names - logging should not include those targets", () => {
it("basic with missing script names - logging should not include those targets", () => {
const repo = new Monorepo("basics-missing-scripts");
repo.init();

View file

@@ -3,6 +3,8 @@ import * as fs from "fs";
import * as path from "path";
import * as execa from "execa";
import { glob } from "glob-hasher";
export class Monorepo {
static tmpdir = os.tmpdir();
@@ -10,6 +12,17 @@ export class Monorepo {
nodeModulesPath: string;
yarnPath: string;
static externalPackageJsonGlobs = [
"node_modules/yoga-layout-prebuilt/package.json",
"node_modules/glob-hasher/package.json",
"node_modules/glob-hasher-*/package.json",
];
static externalPackageJsons = glob(Monorepo.externalPackageJsonGlobs, {
cwd: path.join(__dirname, "..", "..", "..", ".."),
gitignore: false,
})!;
constructor(private name: string) {
this.root = fs.mkdtempSync(path.join(Monorepo.tmpdir, `lage-monorepo-${name}-`));
this.nodeModulesPath = path.join(this.root, "node_modules");
@@ -26,8 +39,11 @@
}
install() {
const yogaPath = path.dirname(require.resolve("yoga-layout-prebuilt/package.json"));
fs.cpSync(yogaPath, path.join(this.root, "node_modules/yoga-layout-prebuilt"), { recursive: true });
for (const packagePath of Monorepo.externalPackageJsons.map((p) => path.dirname(p))) {
const name = JSON.parse(fs.readFileSync(path.join(packagePath, "package.json"), "utf-8")).name;
fs.cpSync(packagePath, path.join(this.root, "node_modules", name), { recursive: true });
}
fs.cpSync(path.join(__dirname, "..", "..", "yarn"), path.dirname(this.yarnPath), { recursive: true });
execa.sync("node", [this.yarnPath, "install"], { cwd: this.root });
}
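
glob-hasher ships its native binding as a platform-specific optional dependency (glob-hasher-darwin-arm64, glob-hasher-linux-x64-gnu, and so on; see the yarn.lock diff below), and only the variant matching the current machine is actually installed. That is why the fixture resolves the packages to copy with a wildcard glob rather than require.resolve, which would fail for every non-matching platform package. Roughly what the static initializer above collects (the repo-root path is the same relative walk used in the diff):

import { glob } from "glob-hasher";
import * as path from "path";

// Whichever glob-hasher platform packages are installed at the repo root, plus
// yoga-layout-prebuilt, get copied into the fixture's node_modules in install().
const repoRoot = path.join(__dirname, "..", "..", "..", "..");
const externalPackageJsons = glob(
  [
    "node_modules/yoga-layout-prebuilt/package.json",
    "node_modules/glob-hasher/package.json",
    "node_modules/glob-hasher-*/package.json",
  ],
  { cwd: repoRoot, gitignore: false }
)!;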

View file

@@ -10,7 +10,8 @@
"bundle": "rollup --config ./rollup.config.js"
},
"dependencies": {
"yoga-layout-prebuilt": "^1.10.0"
"yoga-layout-prebuilt": "^1.10.0",
"glob-hasher": "1.1.1"
},
"optionalDependencies": {
"fsevents": "~2.3.2"

View file

@@ -37,7 +37,7 @@ export default [
retainDynamicImport(),
terser(),
],
external: ["fsevents", "yoga-layout-prebuilt"],
external: ["fsevents", "yoga-layout-prebuilt", "glob-hasher"],
inlineDynamicImports: true,
},
{

View file

@@ -51,14 +51,14 @@ export class TargetFactory {
createGlobalTarget(id: string, config: TargetConfig): Target {
const { root } = this.options;
const { options, deps, dependsOn, inputs, outputs, priority, maxWorkers, environmentGlob, weight } = config;
const { options, deps, dependsOn, cache, inputs, outputs, priority, maxWorkers, environmentGlob, weight } = config;
const { task } = getPackageAndTask(id);
const target = {
id,
label: id,
type: config.type,
task,
cache: false,
cache: cache !== false,
cwd: root,
depSpecs: dependsOn ?? deps ?? [],
dependencies: [],
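
The TargetFactory change above is what actually enables the feature: createGlobalTarget previously hard-coded cache: false, so root-level targets were never cached; now caching defaults to on unless the target config opts out explicitly. The resulting behavior, as a tiny truth table:

// `cache` is the optional TargetConfig field now read by createGlobalTarget.
const cacheForGlobalTarget = (cache?: boolean): boolean => cache !== false;

cacheForGlobalTarget(undefined); // true  (cached by default; previously always false)
cacheForGlobalTarget(true);      // true
cacheForGlobalTarget(false);     // false (explicit opt-out is still honored)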

View file

@@ -15,7 +15,7 @@ module.exports = async function depcheckWorker({ target }) {
const results = await depcheck(target.cwd, {
ignoreBinPackage: true,
ignorePatterns: ["node_modules", "dist", "lib", "build"],
ignoreMatches: ["yoga-layout-prebuilt"]
ignoreMatches: ["yoga-layout-prebuilt", "glob-hasher"]
});
let hasErrors = false;

View file

@@ -1128,13 +1128,6 @@
resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40"
integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==
"@types/mock-fs@4.13.1":
version "4.13.1"
resolved "https://registry.yarnpkg.com/@types/mock-fs/-/mock-fs-4.13.1.tgz#9201554ceb23671badbfa8ac3f1fa9e0706305be"
integrity sha512-m6nFAJ3lBSnqbvDZioawRvpLXSaPyn52Srf7OfzjubYbYX8MTUdIgDxQl0wEapm4m/pNYSd9TXocpQ0TvZFlYA==
dependencies:
"@types/node" "*"
"@types/node-fetch@^2.5.0":
version "2.6.2"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.2.tgz#d1a9c5fd049d9415dce61571557104dec3ec81da"
@@ -2625,6 +2618,42 @@ git-url-parse@^13.0.0:
dependencies:
git-up "^7.0.0"
glob-hasher-darwin-arm64@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher-darwin-arm64/-/glob-hasher-darwin-arm64-1.1.1.tgz#625f6bf445b441ef3d733298869a7620d32c38bb"
integrity sha512-Zx2WB81BZ+5TDemdM5l8UjW94Css8YQmSBQfnvG2lqdmnfWZ8upaaK1uHrUyQ9XbQotDpjais7xC92GU+PzOpw==
glob-hasher-darwin-x64@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher-darwin-x64/-/glob-hasher-darwin-x64-1.1.1.tgz#0126f3bc153db7a708c0c58a4103c3c0064b20fe"
integrity sha512-U8xVbnPnOIL7nyiUnnOiyz9hpZS7UEsZbBn8F2705QmtOPazoe9zcvJnzcLp5G9OUQ4lMQoZsBVPIXrVtsxHUA==
glob-hasher-linux-x64-gnu@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher-linux-x64-gnu/-/glob-hasher-linux-x64-gnu-1.1.1.tgz#1fd5d8501e5636953778ad3ab206378d2438488f"
integrity sha512-u/IkNXy4OruR9eukkNTKnY3E+QgCIpVUAKi41dMjFfRH6OPisWNWPy8yb4ouKR6xPyRT9kTzbtJoYb72CcZOBw==
glob-hasher-win32-arm64-msvc@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher-win32-arm64-msvc/-/glob-hasher-win32-arm64-msvc-1.1.1.tgz#867fcec657d57e0709360d12b47594ba334c9b5c"
integrity sha512-4GCuvDDoMwdbYl83T/cJM8sYjrP2dY1IPqFOTEMBiOAoFuoLuk9vMvUF5GqYqa/gPUU9q2lhZorrxH+NZZBiaw==
glob-hasher-win32-x64-msvc@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher-win32-x64-msvc/-/glob-hasher-win32-x64-msvc-1.1.1.tgz#c79bec37c3038cd8c87be33930a2c0648ed3d087"
integrity sha512-qJCm1Zfr8I5eNRuYK32oDshiuybJCSqQ95Spharv9Ns0yl8BPzh6VmXUHSPV2RZnUmzZr6KzAvAceQJ6n6pXfg==
glob-hasher@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/glob-hasher/-/glob-hasher-1.1.1.tgz#a5d64acbdbe32ad65f3770a66b5da1f093f35b57"
integrity sha512-N/YHEuUUlKIMGp2J2IfjI967o0t6ZaOq4IlyEzjFAqbE8M7zdFOK1dIZ5cTYVY3JEyVI2ffR8Tuo1neinF43eA==
optionalDependencies:
glob-hasher-darwin-arm64 "1.1.1"
glob-hasher-darwin-x64 "1.1.1"
glob-hasher-linux-x64-gnu "1.1.1"
glob-hasher-win32-arm64-msvc "1.1.1"
glob-hasher-win32-x64-msvc "1.1.1"
glob-parent@^5.1.2, glob-parent@~5.1.2:
version "5.1.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
@@ -3712,11 +3741,6 @@ mkdirp-classic@^0.5.2:
resolved "https://registry.yarnpkg.com/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz#fa10c9115cc6d8865be221ba47ee9bed78601113"
integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==
mock-fs@5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-5.2.0.tgz#3502a9499c84c0a1218ee4bf92ae5bf2ea9b2b5e"
integrity sha512-2dF2R6YMSZbpip1V1WHKGLNjr/k48uQClqMVb5H3MOvwc9qhYis3/IWbj02qIg/Y8MDXKFF4c5v0rxx2o6xTZw==
ms@2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"