diff --git a/.eslintrc.js b/.eslintrc.js
index 3b27e19a120d..1e02e4c04d22 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -45,7 +45,6 @@ module.exports = {
   "overrides": [{
     "files": [
       "devtools/**",
-      "taskcluster/**",
      "testing/**",
       "toolkit/**",
       "tools/**",
diff --git a/.prettierignore b/.prettierignore
index 91ef9226abe2..06a4fa792316 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -40,7 +40,6 @@ toolkit/components/telemetry/datareporting-prefs.js
 toolkit/components/telemetry/healthreport-prefs.js
 
 # Ignore all top-level directories for now.
-taskcluster/**
 testing/**
 toolkit/**
 tools/**
diff --git a/taskcluster/docker/index-task/insert-indexes.js b/taskcluster/docker/index-task/insert-indexes.js
index 6a5bef66af9c..f84ca3def7a5 100644
--- a/taskcluster/docker/index-task/insert-indexes.js
+++ b/taskcluster/docker/index-task/insert-indexes.js
@@ -2,16 +2,18 @@ let taskcluster = require("taskcluster-client");
 
 // Create instance of index client
 let index = new taskcluster.Index({
-  delayFactor: 750,  // Good solid delay for background process
-  retries: 8,        // A few extra retries for robustness
-  rootUrl: process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
+  delayFactor: 750, // Good solid delay for background process
+  retries: 8, // A few extra retries for robustness
+  rootUrl:
+    process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
 });
 
 // Create queue instance for fetching taskId
 let queue = new taskcluster.Queue({
-  delayFactor: 750,  // Good solid delay for background process
-  retries: 8,        // A few extra retries for robustness
-  rootUrl: process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
+  delayFactor: 750, // Good solid delay for background process
+  retries: 8, // A few extra retries for robustness
+  rootUrl:
+    process.env.TASKCLUSTER_PROXY_URL || process.env.TASKCLUSTER_ROOT_URL,
 });
 
 // Load input
@@ -31,24 +33,37 @@ if (isNaN(rank)) {
 }
 
 // Fetch task definition to get expiration and then insert into index
-queue.task(taskId).then(task => task.expires).then(expires => {
-  return Promise.all(namespaces.map(namespace => {
-    console.log("Inserting %s into index (rank %d) under: %s", taskId, rank, namespace);
-    return index.insertTask(namespace, {
-      taskId,
-      rank,
-      data: {},
-      expires,
-    });
-  }));
-}).then(() => {
-  console.log("indexing successfully completed.");
-  process.exit(0);
-}).catch(err => {
-  console.log("Error:\n%s", err);
-  if (err.stack) {
-    console.log("Stack:\n%s", err.stack);
-  }
-  console.log("Properties:\n%j", err);
-  throw err;
-}).catch(() => process.exit(1));
+queue
+  .task(taskId)
+  .then(task => task.expires)
+  .then(expires => {
+    return Promise.all(
+      namespaces.map(namespace => {
+        console.log(
+          "Inserting %s into index (rank %d) under: %s",
+          taskId,
+          rank,
+          namespace
+        );
+        return index.insertTask(namespace, {
+          taskId,
+          rank,
+          data: {},
+          expires,
+        });
+      })
+    );
+  })
+  .then(() => {
+    console.log("indexing successfully completed.");
+    process.exit(0);
+  })
+  .catch(err => {
+    console.log("Error:\n%s", err);
+    if (err.stack) {
+      console.log("Stack:\n%s", err.stack);
+    }
+    console.log("Properties:\n%j", err);
+    throw err;
+  })
+  .catch(() => process.exit(1));
diff --git a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
index 063547620f7d..8437816cb292 100644
--- a/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
+++ b/taskcluster/docker/periodic-updates/scripts/genHPKPStaticPins.js
@@ -12,17 +12,20 @@
 "use strict";
 
 if (arguments.length != 2) {
-  throw new Error("Usage: genHPKPStaticPins.js " +
-                  "<absolute path to current PreloadedHPKPins.json> " +
-                  "<absolute path to open output file>");
+  throw new Error(
+    "Usage: genHPKPStaticPins.js " +
+      "<absolute path to current PreloadedHPKPins.json> " +
+      "<absolute path to open output file>"
+  );
 }
 
 var { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
 var { FileUtils } = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
 var { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm");
 
-var gCertDB = Cc["@mozilla.org/security/x509certdb;1"]
-                .getService(Ci.nsIX509CertDB);
+var gCertDB = Cc["@mozilla.org/security/x509certdb;1"].getService(
+  Ci.nsIX509CertDB
+);
 
 const SHA256_PREFIX = "sha256/";
 const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";
@@ -30,18 +33,20 @@ const GOOGLE_PIN_PREFIX = "GOOGLE_PIN_";
 // Pins expire in 14 weeks (6 weeks on Beta + 8 weeks on stable)
 const PINNING_MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 14;
 
-const FILE_HEADER = "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
-" * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
-" * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
-"\n" +
-"/*****************************************************************************/\n" +
-"/* This is an automatically generated file. If you're not                    */\n" +
-"/* PublicKeyPinningService.cpp, you shouldn't be #including it.              */\n" +
-"/*****************************************************************************/\n" +
-"#include <stdint.h>" +
-"\n";
+const FILE_HEADER =
+  "/* This Source Code Form is subject to the terms of the Mozilla Public\n" +
+  " * License, v. 2.0. If a copy of the MPL was not distributed with this\n" +
+  " * file, You can obtain one at http://mozilla.org/MPL/2.0/. */\n" +
+  "\n" +
+  "/*****************************************************************************/\n" +
+  "/* This is an automatically generated file. If you're not                    */\n" +
+  "/* PublicKeyPinningService.cpp, you shouldn't be #including it.              */\n" +
*/\n" + + "/*****************************************************************************/\n" + + "#include " + + "\n"; -const DOMAINHEADER = "/* Domainlist */\n" + +const DOMAINHEADER = + "/* Domainlist */\n" + "struct TransportSecurityPreload {\n" + " // See bug 1338873 about making these fields const.\n" + " const char* mHost;\n" + @@ -52,7 +57,8 @@ const DOMAINHEADER = "/* Domainlist */\n" + " const StaticFingerprints* pinset;\n" + "};\n\n"; -const PINSETDEF = "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" + +const PINSETDEF = + "/* Pinsets are each an ordered list by the actual value of the fingerprint */\n" + "struct StaticFingerprints {\n" + " // See bug 1338873 about making these fields const.\n" + " size_t size;\n" + @@ -74,8 +80,9 @@ function writeString(string) { function readFileToString(filename) { let file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); file.initWithPath(filename); - let stream = Cc["@mozilla.org/network/file-input-stream;1"] - .createInstance(Ci.nsIFileInputStream); + let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsIFileInputStream + ); stream.init(file, -1, 0, 0); let buf = NetUtil.readInputStreamToString(stream, stream.available()); return buf; @@ -104,28 +111,32 @@ function download(filename) { } if (req.status != 200) { - throw new Error("ERROR: problem downloading '" + filename + "': status " + - req.status); + throw new Error( + "ERROR: problem downloading '" + filename + "': status " + req.status + ); } let resultDecoded; try { resultDecoded = atob(req.responseText); } catch (e) { - throw new Error("ERROR: could not decode data as base64 from '" + filename + - "': " + e); + throw new Error( + "ERROR: could not decode data as base64 from '" + filename + "': " + e + ); } return resultDecoded; } function downloadAsJson(filename) { // we have to filter out '//' comments, while not mangling the json - let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/mg, ""); + let result = download(filename).replace(/^(\s*)?\/\/[^\n]*\n/gm, ""); let data = null; try { data = JSON.parse(result); } catch (e) { - throw new Error("ERROR: could not parse data from '" + filename + "': " + e); + throw new Error( + "ERROR: could not parse data from '" + filename + "': " + e + ); } return data; } @@ -161,8 +172,9 @@ function sha256Base64(input) { data[i] = decodedValue.charCodeAt(i); } - let hasher = Cc["@mozilla.org/security/hash;1"] - .createInstance(Ci.nsICryptoHash); + let hasher = Cc["@mozilla.org/security/hash;1"].createInstance( + Ci.nsICryptoHash + ); hasher.init(hasher.SHA256); hasher.update(data, data.length); @@ -231,8 +243,9 @@ function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) { } else if (line.startsWith(BEGIN_PUB_KEY)) { state = IN_PUB_KEY; } else { - throw new Error("ERROR: couldn't parse Chrome certificate file " + - "line: " + line); + throw new Error( + "ERROR: couldn't parse Chrome certificate file " + "line: " + line + ); } break; case IN_CERT: @@ -247,8 +260,13 @@ function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) { // Not one of our built-in certs. Prefix the name with // GOOGLE_PIN_. 
           mozName = GOOGLE_PIN_PREFIX + chromeName;
-          dump("Can't find hash in builtin certs for Chrome nickname " +
-               chromeName + ", inserting " + mozName + "\n");
+          dump(
+            "Can't find hash in builtin certs for Chrome nickname " +
+              chromeName +
+              ", inserting " +
+              mozName +
+              "\n"
+          );
           certSKDToName[hash] = mozName;
           certNameToSKD[mozName] = hash;
         }
@@ -270,10 +288,12 @@ function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) {
         }
         break;
       default:
-        throw new Error("ERROR: couldn't parse Chrome certificate file " + line);
+        throw new Error(
+          "ERROR: couldn't parse Chrome certificate file " + line
+        );
     }
   }
-  return [ chromeNameToHash, chromeNameToMozName ];
+  return [chromeNameToHash, chromeNameToMozName];
 }
 
 // We can only import pinsets from chrome if for every name in the pinset:
@@ -290,11 +310,13 @@ function downloadAndParseChromeCerts(filename, certNameToSKD, certSKDToName) {
 // and an array of imported pinset entries:
 // { name: string, include_subdomains: boolean, test_mode: boolean,
 //   pins: pinset_name }
-function downloadAndParseChromePins(filename,
-                                    chromeNameToHash,
-                                    chromeNameToMozName,
-                                    certNameToSKD,
-                                    certSKDToName) {
+function downloadAndParseChromePins(
+  filename,
+  chromeNameToHash,
+  chromeNameToMozName,
+  certNameToSKD,
+  certSKDToName
+) {
   let chromePreloads = downloadAsJson(filename);
   let chromePins = chromePreloads.pinsets;
   let chromeImportedPinsets = {};
@@ -317,8 +339,14 @@ function downloadAndParseChromePins(filename,
       } else if (name in chromeNameToMozName) {
         pinset.sha256_hashes.push(chromeNameToMozName[name]);
       } else {
-        dump("Skipping Chrome pinset " + pinset.name + ", couldn't find " +
-             "builtin " + name + " from cert file\n");
+        dump(
+          "Skipping Chrome pinset " +
+            pinset.name +
+            ", couldn't find " +
+            "builtin " +
+            name +
+            " from cert file\n"
+        );
         valid = false;
       }
     });
@@ -345,12 +373,9 @@ function downloadAndParseChromePins(filename,
       // HPKP implementation.
       entry.name = entry.name.trim();
 
-      let isProductionDomain =
-        (cData.production_domains.includes(entry.name));
-      let isProductionPinset =
-        (cData.production_pinsets.includes(pinsetName));
-      let excludeDomain =
-        (cData.exclude_domains.includes(entry.name));
+      let isProductionDomain = cData.production_domains.includes(entry.name);
+      let isProductionPinset = cData.production_pinsets.includes(pinsetName);
+      let excludeDomain = cData.exclude_domains.includes(entry.name);
       let isTestMode = !isProductionPinset && !isProductionDomain;
       if (entry.pins && !excludeDomain && chromeImportedPinsets[entry.pins]) {
         chromeImportedEntries.push({
@@ -358,10 +383,11 @@ function downloadAndParseChromePins(filename,
           include_subdomains: entry.include_subdomains,
           test_mode: isTestMode,
           is_moz: false,
-          pins: pinsetName });
+          pins: pinsetName,
+        });
       }
     });
-  return [ chromeImportedPinsets, chromeImportedEntries ];
+  return [chromeImportedPinsets, chromeImportedEntries];
 }
 
 // Returns a pair of maps [certNameToSKD, certSKDToName] between cert
@@ -420,18 +446,25 @@ function compareByName(a, b) {
 function genExpirationTime() {
   let now = new Date();
   let nowMillis = now.getTime();
-  let expirationMillis = nowMillis + (PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000);
+  let expirationMillis = nowMillis + PINNING_MINIMUM_REQUIRED_MAX_AGE * 1000;
   let expirationMicros = expirationMillis * 1000;
-  return "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" +
-         expirationMicros + ");\n";
+  return (
+    "static const PRTime kPreloadPKPinsExpirationTime = INT64_C(" +
+    expirationMicros +
+    ");\n"
+  );
 }
 
 function writeFullPinset(certNameToSKD, certSKDToName, pinset) {
   if (!pinset.sha256_hashes || pinset.sha256_hashes.length == 0) {
     throw new Error(`ERROR: Pinset ${pinset.name} does not contain any hashes`);
   }
-  writeFingerprints(certNameToSKD, certSKDToName, pinset.name,
-                    pinset.sha256_hashes);
+  writeFingerprints(
+    certNameToSKD,
+    certSKDToName,
+    pinset.name,
+    pinset.sha256_hashes
+  );
 }
 
 function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) {
@@ -452,9 +485,16 @@ function writeFingerprints(certNameToSKD, certSKDToName, name, hashes) {
     writeString("  0\n");
   }
   writeString("};\n");
-  writeString("static const StaticFingerprints " + varPrefix + " = {\n  " +
-    "sizeof(" + varPrefix + "_Data) / sizeof(const char*),\n  " + varPrefix +
-    "_Data\n};\n\n");
+  writeString(
+    "static const StaticFingerprints " +
+      varPrefix +
+      " = {\n  " +
+      "sizeof(" +
+      varPrefix +
+      "_Data) / sizeof(const char*),\n  " +
+      varPrefix +
+      "_Data\n};\n\n"
+  );
 }
 
 function writeEntry(entry) {
@@ -474,8 +514,10 @@ function writeEntry(entry) {
   } else {
     printVal += "false, ";
   }
-  if (entry.is_moz || (entry.pins.includes("mozilla") &&
-                       entry.pins != "mozilla_test")) {
+  if (
+    entry.is_moz ||
+    (entry.pins.includes("mozilla") && entry.pins != "mozilla_test")
+  ) {
     printVal += "true, ";
   } else {
     printVal += "false, ";
@@ -497,8 +539,10 @@ function writeEntry(entry) {
 
 function writeDomainList(chromeImportedEntries) {
   writeString("/* Sort hostnames for binary search. */\n");
-  writeString("static const TransportSecurityPreload " +
-              "kPublicKeyPinningPreloadList[] = {\n");
+  writeString(
+    "static const TransportSecurityPreload " +
+      "kPublicKeyPinningPreloadList[] = {\n"
+  );
   let count = 0;
   let mozillaDomains = {};
   gStaticPins.entries.forEach(function(entry) {
@@ -508,8 +552,11 @@ function writeDomainList(chromeImportedEntries) {
   // chromeImportedEntries.
   for (let i = chromeImportedEntries.length - 1; i >= 0; i--) {
     if (mozillaDomains[chromeImportedEntries[i].name]) {
-      dump("Skipping duplicate pinset for domain " +
-           JSON.stringify(chromeImportedEntries[i], undefined, 2) + "\n");
+      dump(
+        "Skipping duplicate pinset for domain " +
+          JSON.stringify(chromeImportedEntries[i], undefined, 2) +
+          "\n"
+      );
       chromeImportedEntries.splice(i, 1);
     }
   }
@@ -525,15 +572,19 @@ function writeDomainList(chromeImportedEntries) {
   writeString("\nstatic const int32_t kUnknownId = -1;\n");
 }
 
-function writeFile(certNameToSKD, certSKDToName,
-                   chromeImportedPinsets, chromeImportedEntries) {
+function writeFile(
+  certNameToSKD,
+  certSKDToName,
+  chromeImportedPinsets,
+  chromeImportedEntries
+) {
   // Compute used pins from both Chrome's and our pinsets, so we can output
   // them later.
   let usedFingerprints = {};
   let mozillaPins = {};
   gStaticPins.pinsets.forEach(function(pinset) {
     mozillaPins[pinset.name] = true;
-    pinset.sha256_hashes.forEach(function (name) {
+    pinset.sha256_hashes.forEach(function(name) {
       usedFingerprints[name] = true;
     });
   });
@@ -547,14 +598,16 @@ function writeFile(certNameToSKD, certSKDToName,
   writeString(FILE_HEADER);
 
   // Write actual fingerprints.
-  Object.keys(usedFingerprints).sort().forEach(function(certName) {
-    if (certName) {
-      writeString("/* " + certName + " */\n");
-      writeString("static const char " + nameToAlias(certName) + "[] =\n");
-      writeString("  \"" + certNameToSKD[certName] + "\";\n");
-      writeString("\n");
-    }
-  });
+  Object.keys(usedFingerprints)
+    .sort()
+    .forEach(function(certName) {
+      if (certName) {
+        writeString("/* " + certName + " */\n");
+        writeString("static const char " + nameToAlias(certName) + "[] =\n");
+        writeString('  "' + certNameToSKD[certName] + '";\n');
+        writeString("\n");
+      }
+    });
 
   // Write the pinsets
   writeString(PINSETDEF);
@@ -588,14 +641,25 @@ function loadExtraCertificates(certStringList) {
 }
 
 var extraCertificates = loadExtraCertificates(gStaticPins.extra_certificates);
-var [ certNameToSKD, certSKDToName ] = loadNSSCertinfo(extraCertificates);
-var [ chromeNameToHash, chromeNameToMozName ] = downloadAndParseChromeCerts(
-  gStaticPins.chromium_data.cert_file_url, certNameToSKD, certSKDToName);
-var [ chromeImportedPinsets, chromeImportedEntries ] =
-  downloadAndParseChromePins(gStaticPins.chromium_data.json_file_url,
-    chromeNameToHash, chromeNameToMozName, certNameToSKD, certSKDToName);
+var [certNameToSKD, certSKDToName] = loadNSSCertinfo(extraCertificates);
+var [chromeNameToHash, chromeNameToMozName] = downloadAndParseChromeCerts(
+  gStaticPins.chromium_data.cert_file_url,
+  certNameToSKD,
+  certSKDToName
+);
+var [chromeImportedPinsets, chromeImportedEntries] = downloadAndParseChromePins(
+  gStaticPins.chromium_data.json_file_url,
+  chromeNameToHash,
+  chromeNameToMozName,
+  certNameToSKD,
+  certSKDToName
+);
 
-writeFile(certNameToSKD, certSKDToName, chromeImportedPinsets,
-  chromeImportedEntries);
+writeFile(
+  certNameToSKD,
+  certSKDToName,
+  chromeImportedPinsets,
+  chromeImportedEntries
+);
 
 FileUtils.closeSafeFileOutputStream(gFileOutputStream);
diff --git a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
index 472bcce31042..9e8bf492bc95 100644
--- a/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
+++ b/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js
@@ -14,13 +14,19 @@ var Ci = Components.interfaces;
 var Cu = Components.utils;
 var Cr = Components.results;
 */
-var gSSService = Cc["@mozilla.org/ssservice;1"].getService(Ci.nsISiteSecurityService);
+var gSSService = Cc["@mozilla.org/ssservice;1"].getService(
+  Ci.nsISiteSecurityService
+);
 
-const {Services} = ChromeUtils.import("resource://gre/modules/Services.jsm");
-const {FileUtils} = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
+const { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm");
+const { FileUtils } = ChromeUtils.import(
+  "resource://gre/modules/FileUtils.jsm"
+);
 
-const SOURCE = "https://chromium.googlesource.com/chromium/src/net/+/master/http/transport_security_state_static.json?format=TEXT";
-const TOOL_SOURCE = "https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js";
+const SOURCE =
+  "https://chromium.googlesource.com/chromium/src/net/+/master/http/transport_security_state_static.json?format=TEXT";
+const TOOL_SOURCE =
+  "https://hg.mozilla.org/mozilla-central/file/tip/taskcluster/docker/periodic-updates/scripts/getHSTSPreloadList.js";
 const OUTPUT = "nsSTSPreloadList.inc";
 const ERROR_OUTPUT = "nsSTSPreloadList.errors";
 const MINIMUM_REQUIRED_MAX_AGE = 60 * 60 * 24 * 7 * 18;
@@ -55,18 +61,22 @@ function download() {
   }
 
   if (req.status != 200) {
-    throw new Error("ERROR: problem downloading '" + SOURCE + "': status " + req.status);
+    throw new Error(
+      "ERROR: problem downloading '" + SOURCE + "': status " + req.status
+    );
   }
 
   let resultDecoded;
   try {
     resultDecoded = atob(req.responseText);
   } catch (e) {
-    throw new Error("ERROR: could not decode data as base64 from '" + SOURCE + "': " + e);
+    throw new Error(
+      "ERROR: could not decode data as base64 from '" + SOURCE + "': " + e
+    );
   }
 
   // we have to filter out '//' comments, while not mangling the json
-  let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/mg, "");
+  let result = resultDecoded.replace(/^(\s*)?\/\/[^\n]*\n/gm, "");
   let data = null;
   try {
     data = JSON.parse(result);
@@ -80,7 +90,9 @@ function getHosts(rawdata) {
   let hosts = [];
 
   if (!rawdata || !rawdata.entries) {
-    throw new Error("ERROR: source data not formatted correctly: 'entries' not found");
+    throw new Error(
+      "ERROR: source data not formatted correctly: 'entries' not found"
+    );
   }
 
   for (let entry of rawdata.entries) {
@@ -114,9 +126,27 @@ function processStsHeader(host, header, status, securityInfo) {
     try {
       let uri = Services.io.newURI("https://" + host.name);
       let secInfo = securityInfo.QueryInterface(Ci.nsITransportSecurityInfo);
-      gSSService.processHeader(Ci.nsISiteSecurityService.HEADER_HSTS, uri, header, secInfo, 0, Ci.nsISiteSecurityService.SOURCE_PRELOAD_LIST, {}, maxAge, includeSubdomains);
+      gSSService.processHeader(
+        Ci.nsISiteSecurityService.HEADER_HSTS,
+        uri,
+        header,
+        secInfo,
+        0,
+        Ci.nsISiteSecurityService.SOURCE_PRELOAD_LIST,
+        {},
+        maxAge,
+        includeSubdomains
+      );
     } catch (e) {
-      dump("ERROR: could not process header '" + header + "' from " + host.name + ": " + e + "\n");
+      dump(
+        "ERROR: could not process header '" +
+          header +
+          "' from " +
+          host.name +
+          ": " +
+          e +
+          "\n"
+      );
       error = e;
     }
   } else if (status == 0) {
@@ -161,7 +191,10 @@ RedirectAndAuthStopper.prototype = {
     return this.QueryInterface(iid);
   },
 
-  QueryInterface: ChromeUtils.generateQI([Ci.nsIChannelEventSink, Ci.nsIAuthPrompt2]),
+  QueryInterface: ChromeUtils.generateQI([
+    Ci.nsIChannelEventSink,
+    Ci.nsIAuthPrompt2,
+  ]),
 };
 
 function fetchstatus(host) {
@@ -175,8 +208,14 @@ function fetchstatus(host) {
 
     let errorHandler = () => {
host.name + "\n"); - resolve(processStsHeader(host, null, xhr.status, - xhr.channel && xhr.channel.securityInfo)); + resolve( + processStsHeader( + host, + null, + xhr.status, + xhr.channel && xhr.channel.securityInfo + ) + ); }; xhr.onerror = errorHandler; @@ -185,7 +224,9 @@ function fetchstatus(host) { xhr.onload = () => { let header = xhr.getResponseHeader("strict-transport-security"); - resolve(processStsHeader(host, header, xhr.status, xhr.channel.securityInfo)); + resolve( + processStsHeader(host, header, xhr.status, xhr.channel.securityInfo) + ); }; xhr.channel.notificationCallbacks = new RedirectAndAuthStopper(); @@ -221,16 +262,24 @@ function getExpirationTimeString() { let now = new Date(); let nowMillis = now.getTime(); // MINIMUM_REQUIRED_MAX_AGE is in seconds, so convert to milliseconds - let expirationMillis = nowMillis + (MINIMUM_REQUIRED_MAX_AGE * 1000); + let expirationMillis = nowMillis + MINIMUM_REQUIRED_MAX_AGE * 1000; let expirationMicros = expirationMillis * 1000; - return "const PRTime gPreloadListExpirationTime = INT64_C(" + expirationMicros + ");\n"; + return ( + "const PRTime gPreloadListExpirationTime = INT64_C(" + + expirationMicros + + ");\n" + ); } function shouldRetry(response) { - return (response.error != ERROR_NO_HSTS_HEADER && response.error != ERROR_MAX_AGE_TOO_LOW && response.error != ERROR_NONE && response.retries > 0); + return ( + response.error != ERROR_NO_HSTS_HEADER && + response.error != ERROR_MAX_AGE_TOO_LOW && + response.error != ERROR_NONE && + response.retries > 0 + ); } - // Copied from browser/components/migration/MigrationUtils.jsm function spinResolve(promise) { if (!(promise instanceof Promise)) { @@ -239,12 +288,14 @@ function spinResolve(promise) { let done = false; let result = null; let error = null; - promise.catch(e => { - error = e; - }).then(r => { - result = r; - done = true; - }); + promise + .catch(e => { + error = e; + }) + .then(r => { + result = r; + done = true; + }); Services.tm.spinEventLoopUntil(() => done); if (error) { @@ -271,7 +322,10 @@ async function probeHSTSStatuses(inHosts) { promises.push(getHSTSStatus(host)); } let results = await Promise.all(promises); - let progress = (100 * (totalLength - inHosts.length) / totalLength).toFixed(2); + let progress = ( + (100 * (totalLength - inHosts.length)) / + totalLength + ).toFixed(2); dump(progress + "% done\n"); allResults = allResults.concat(results); } @@ -284,7 +338,9 @@ function readCurrentList(filename) { var currentHosts = {}; var file = Cc["@mozilla.org/file/local;1"].createInstance(Ci.nsIFile); file.initWithPath(filename); - var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(Ci.nsILineInputStream); + var fis = Cc["@mozilla.org/network/file-input-stream;1"].createInstance( + Ci.nsILineInputStream + ); fis.init(file, -1, -1, Ci.nsIFileInputStream.CLOSE_ON_EOF); var line = {}; @@ -300,12 +356,12 @@ function readCurrentList(filename) { while (fis.readLine(line)) { let match; - entryRegexes.find((r) => { + entryRegexes.find(r => { match = r.exec(line.value); return match; }); if (match) { - currentHosts[match[1]] = (match[2] == "1" || match[2] == "true"); + currentHosts[match[1]] = match[2] == "1" || match[2] == "true"; } } return currentHosts; @@ -321,7 +377,7 @@ function combineLists(newHosts, currentHosts) { } } if (!found) { - newHosts.push({name: currentHost, retries: MAX_RETRIES}); + newHosts.push({ name: currentHost, retries: MAX_RETRIES }); } } } @@ -330,10 +386,12 @@ const TEST_ENTRIES = [ { name: "includesubdomains.preloaded.test", 
     includeSubdomains: true,
-  }, {
+  },
+  {
     name: "includesubdomains2.preloaded.test",
     includeSubdomains: true,
-  }, {
+  },
+  {
     name: "noincludesubdomains.preloaded.test",
     includeSubdomains: false,
   },
@@ -349,7 +407,10 @@ function getTestHosts() {
   let hosts = [];
   for (let testEntry of TEST_ENTRIES) {
     hosts.push({
-      name: testEntry.name, maxAge: MINIMUM_REQUIRED_MAX_AGE, includeSubdomains: testEntry.includeSubdomains, error: ERROR_NONE,
+      name: testEntry.name,
+      maxAge: MINIMUM_REQUIRED_MAX_AGE,
+      includeSubdomains: testEntry.includeSubdomains,
+      error: ERROR_NONE,
       // This deliberately doesn't have a value for `retries` (because we should
       // never attempt to connect to this host).
       forceInclude: true,
@@ -369,7 +430,11 @@ function filterForcedInclusions(inHosts, outNotForced, outForced) {
   // will be included without being checked (forced); the others will be
   // checked using active probing.
   for (let host of inHosts) {
-    if (host.policy == "google" || host.policy == "public-suffix" || host.policy == "public-suffix-requested") {
+    if (
+      host.policy == "google" ||
+      host.policy == "public-suffix" ||
+      host.policy == "public-suffix-requested"
+    ) {
       host.forceInclude = true;
       host.error = ERROR_NONE;
       outForced.push(host);
@@ -382,7 +447,9 @@ function filterForcedInclusions(inHosts, outNotForced, outForced) {
 function output(statuses) {
   dump("INFO: Writing output to " + OUTPUT + "\n");
   try {
-    var {FileUtils} = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
+    var { FileUtils } = ChromeUtils.import(
+      "resource://gre/modules/FileUtils.jsm"
+    );
     let file = FileUtils.getFile("CurWorkD", [OUTPUT]);
     let fos = FileUtils.openSafeFileOutputStream(file);
 
@@ -392,10 +459,7 @@ function output(statuses) {
     writeTo(GPERF_DELIM, fos);
 
     for (let status of statuses) {
-      let includeSubdomains = (
-        status.includeSubdomains
-        ? 1
-        : 0);
+      let includeSubdomains = status.includeSubdomains ? 1 : 0;
       writeTo(status.name + ", " + includeSubdomains + "\n", fos);
     }
 
@@ -408,15 +472,16 @@ function output(statuses) {
 }
 
 function errorToString(status) {
-  return (
-    status.error == ERROR_MAX_AGE_TOO_LOW
+  return status.error == ERROR_MAX_AGE_TOO_LOW
     ? status.error + status.maxAge
-    : status.error);
+    : status.error;
 }
 
 async function main(args) {
   if (args.length != 1) {
-    throw new Error("Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>");
+    throw new Error(
+      "Usage: getHSTSPreloadList.js <absolute path to current nsSTSPreloadList.inc>"
+    );
   }
 
   // get the current preload list
@@ -424,7 +489,10 @@ async function main(args) {
   let currentHosts = readCurrentList(args[0]);
   // delete any hosts we use in tests so we don't actually connect to them
   deleteTestHosts(currentHosts);
   // disable the current preload list so it won't interfere with requests we make
-  Services.prefs.setBoolPref("network.stricttransportsecurity.preloadlist", false);
+  Services.prefs.setBoolPref(
+    "network.stricttransportsecurity.preloadlist",
+    false
+  );
   // download and parse the raw json file from the Chromium source
   let rawdata = download();
   // get just the hosts with mode: "force-https"
@@ -446,38 +514,53 @@ async function main(args) {
   dump("Adding forced hosts\n");
   insertHosts(hstsStatuses, forcedHosts);
 
-  let total = await probeHSTSStatuses(hostsToContact).then(function(probedStatuses) {
-    return hstsStatuses.concat(probedStatuses);
-  }).then(function(statuses) {
-    return statuses.sort(compareHSTSStatus);
-  }).then(function(statuses) {
-    for (let status of statuses) {
-      // If we've encountered an error for this entry (other than the site not
-      // sending an HSTS header), be safe and don't remove it from the list
-      // (given that it was already on the list).
-      if (!status.forceInclude && status.error != ERROR_NONE && status.error != ERROR_NO_HSTS_HEADER && status.error != ERROR_MAX_AGE_TOO_LOW && status.name in currentHosts) {
-        // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
-        status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
-        status.includeSubdomains = currentHosts[status.name];
-      }
-    }
-    return statuses;
-  }).then(function(statuses) {
-    // Filter out entries we aren't including.
-    var includedStatuses = statuses.filter(function(status) {
-      if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
-        // dump("INFO: " + status.name + " NOT ON the preload list\n");
-        return false;
-      }
+  let total = await probeHSTSStatuses(hostsToContact)
+    .then(function(probedStatuses) {
+      return hstsStatuses.concat(probedStatuses);
+    })
+    .then(function(statuses) {
+      return statuses.sort(compareHSTSStatus);
+    })
+    .then(function(statuses) {
+      for (let status of statuses) {
+        // If we've encountered an error for this entry (other than the site not
+        // sending an HSTS header), be safe and don't remove it from the list
+        // (given that it was already on the list).
+        if (
+          !status.forceInclude &&
+          status.error != ERROR_NONE &&
+          status.error != ERROR_NO_HSTS_HEADER &&
+          status.error != ERROR_MAX_AGE_TOO_LOW &&
+          status.name in currentHosts
+        ) {
+          // dump("INFO: error connecting to or processing " + status.name + " - using previous status on list\n");
+          status.maxAge = MINIMUM_REQUIRED_MAX_AGE;
+          status.includeSubdomains = currentHosts[status.name];
+        }
+      }
+      return statuses;
+    })
+    .then(function(statuses) {
+      // Filter out entries we aren't including.
+      var includedStatuses = statuses.filter(function(status) {
+        if (status.maxAge < MINIMUM_REQUIRED_MAX_AGE && !status.forceInclude) {
+          // dump("INFO: " + status.name + " NOT ON the preload list\n");
+          return false;
+        }
 
-      // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
-      if (status.forceInclude && status.error != ERROR_NONE) {
-        dump(status.name + ": " + errorToString(status) + " (error ignored - included regardless)\n");
-      }
-      return true;
+        // dump("INFO: " + status.name + " ON the preload list (includeSubdomains: " + status.includeSubdomains + ")\n");
+        if (status.forceInclude && status.error != ERROR_NONE) {
+          dump(
+            status.name +
+              ": " +
+              errorToString(status) +
+              " (error ignored - included regardless)\n"
+          );
+        }
+        return true;
+      });
+      return includedStatuses;
     });
-    return includedStatuses;
-  });
 
   // Write the output file
   output(total);