/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

this.EXPORTED_SYMBOLS = ["AddonBlocklistClient",
                         "GfxBlocklistClient",
                         "OneCRLBlocklistClient",
"PinningBlocklistClient",
|
2016-12-12 03:37:22 +03:00
|
|
|
"PluginBlocklistClient"];
|
2016-02-09 21:51:08 +03:00
|
|
|
|
|
|
|
const { classes: Cc, interfaces: Ci, utils: Cu } = Components;

ChromeUtils.import("resource://gre/modules/Services.jsm");
ChromeUtils.import("resource://gre/modules/XPCOMUtils.jsm");
const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm", {});
Cu.importGlobalProperties(["fetch"]);

ChromeUtils.defineModuleGetter(this, "FileUtils",
|
|
|
|
"resource://gre/modules/FileUtils.jsm");
|
|
|
|
ChromeUtils.defineModuleGetter(this, "Kinto",
|
|
|
|
"resource://services-common/kinto-offline-client.js");
|
|
|
|
ChromeUtils.defineModuleGetter(this, "KintoHttpClient",
|
|
|
|
"resource://services-common/kinto-http-client.js");
|
|
|
|
ChromeUtils.defineModuleGetter(this, "FirefoxAdapter",
|
|
|
|
"resource://services-common/kinto-storage-adapter.js");
|
|
|
|
ChromeUtils.defineModuleGetter(this, "CanonicalJSON",
|
|
|
|
"resource://gre/modules/CanonicalJSON.jsm");
|
|
|
|
ChromeUtils.defineModuleGetter(this, "UptakeTelemetry",
|
|
|
|
"resource://services-common/uptake-telemetry.js");
|
2016-12-12 03:37:22 +03:00
|
|
|
|
|
|
|
const KEY_APPDIR = "XCurProcD";
const PREF_SETTINGS_SERVER = "services.settings.server";
const PREF_BLOCKLIST_BUCKET = "services.blocklist.bucket";
const PREF_BLOCKLIST_ONECRL_COLLECTION = "services.blocklist.onecrl.collection";
const PREF_BLOCKLIST_ONECRL_CHECKED_SECONDS = "services.blocklist.onecrl.checked";
const PREF_BLOCKLIST_ADDONS_COLLECTION = "services.blocklist.addons.collection";
const PREF_BLOCKLIST_ADDONS_CHECKED_SECONDS = "services.blocklist.addons.checked";
const PREF_BLOCKLIST_PLUGINS_COLLECTION = "services.blocklist.plugins.collection";
const PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS = "services.blocklist.plugins.checked";
const PREF_BLOCKLIST_PINNING_ENABLED = "services.blocklist.pinning.enabled";
const PREF_BLOCKLIST_PINNING_BUCKET = "services.blocklist.pinning.bucket";
const PREF_BLOCKLIST_PINNING_COLLECTION = "services.blocklist.pinning.collection";
const PREF_BLOCKLIST_PINNING_CHECKED_SECONDS = "services.blocklist.pinning.checked";
const PREF_BLOCKLIST_GFX_COLLECTION = "services.blocklist.gfx.collection";
const PREF_BLOCKLIST_GFX_CHECKED_SECONDS = "services.blocklist.gfx.checked";
const PREF_BLOCKLIST_ENFORCE_SIGNING = "services.blocklist.signing.enforced";

const INVALID_SIGNATURE = "Invalid content/signature";

// This was the default path in earlier versions of
// FirefoxAdapter, so for backwards compatibility we maintain this
// filename, even though it isn't descriptive of who is using it.
const KINTO_STORAGE_PATH = "kinto.sqlite";

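/**
 * Merge remote changes with the locally stored records: the server version of
 * a record wins, tombstones (deleted records) are dropped, and the result is
 * returned sorted by record id.
 */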
function mergeChanges(collection, localRecords, changes) {
  const records = {};
  // Local records by id.
  localRecords.forEach((record) => records[record.id] = collection.cleanLocalFields(record));
  // All existing records are replaced by the version from the server.
  changes.forEach((record) => records[record.id] = record);

  return Object.values(records)
    // Filter out deleted records.
    .filter((record) => record.deleted != true)
    // Sort list by record id.
    .sort((a, b) => {
      if (a.id < b.id) {
        return -1;
      }
      return a.id > b.id ? 1 : 0;
    });
}

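/**
 * Fetch the collection metadata from the Kinto server and return its
 * `signature` field (the content-signature information, including the `x5u`
 * certificate chain URL).
 */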
function fetchCollectionMetadata(remote, collection) {
  const client = new KintoHttpClient(remote);
  return client.bucket(collection.bucket).collection(collection.name).getData()
    .then(result => {
      return result.signature;
    });
}

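/**
 * Fetch the full list of records of the remote collection, sorted by id,
 * so that the result can be canonicalized and its signature verified.
 */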
function fetchRemoteCollection(remote, collection) {
  const client = new KintoHttpClient(remote);
  return client.bucket(collection.bucket)
               .collection(collection.name)
               .listRecords({sort: "id"});
}

class BlocklistClient {

  constructor(collectionName, lastCheckTimePref, processCallback, bucketName, signerName) {
    this.collectionName = collectionName;
    this.lastCheckTimePref = lastCheckTimePref;
    this.processCallback = processCallback;
    this.bucketName = bucketName;
    this.signerName = signerName;

    this._kinto = null;
  }

  get identifier() {
    return `${this.bucketName}/${this.collectionName}`;
  }

  get filename() {
    // Replace slash by the OS-specific path separator (e.g. on Windows).
    const identifier = OS.Path.join(...this.identifier.split("/"));
    return `${identifier}.json`;
  }

  /**
   * Open the underlying Kinto collection, using the appropriate adapter and
   * options. This acts as a context manager where the connection is closed
   * once the specified `callback` has finished.
   *
   * @param {Function} callback the async function to execute with the open SQLite connection.
   * @param {Object} options additional advanced options.
   * @param {string} options.bucket override bucket name of client (default: this.bucketName)
   * @param {string} options.collection override collection name of client (default: this.collectionName)
   * @param {string} options.path override the default SQLite path (default: kinto.sqlite)
   * @param {Object} options.hooks hooks to execute on synchronization (see Kinto.js docs)
   */
  async openCollection(callback, options = {}) {
    const { bucket = this.bucketName, path = KINTO_STORAGE_PATH } = options;
    if (!this._kinto) {
      this._kinto = new Kinto({bucket, adapter: FirefoxAdapter});
    }
    let sqliteHandle;
    try {
      sqliteHandle = await FirefoxAdapter.openConnection({path});
      const colOptions = Object.assign({adapterOptions: {sqliteHandle}}, options);
      const {collection: collectionName = this.collectionName} = options;
      const collection = this._kinto.collection(collectionName, colOptions);
      return await callback(collection);
    } finally {
      if (sqliteHandle) {
        await sqliteHandle.close();
      }
    }
  }

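  // Illustrative usage (not part of the module): read the local records
  // without syncing, letting openCollection() manage the SQLite connection.
  //
  //   const data = await client.openCollection(async (collection) => {
  //     const { data } = await collection.list();
  //     return data;
  //   });
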
  /**
   * Load the JSON file distributed with the release for this blocklist.
   *
   * For Bug 1257565 this method will have to try to load the file from the profile,
   * in order to leverage the updateJSONBlocklist() below, which writes a new
   * dump each time the collection changes.
   */
  async loadDumpFile() {
    // Replace the OS-specific path separator by "/" for the URI.
    const { components: folderFile } = OS.Path.split(this.filename);
    const fileURI = `resource://app/defaults/${folderFile.join("/")}`;
    const response = await fetch(fileURI);
    if (!response.ok) {
      throw new Error(`Could not read from '${fileURI}'`);
    }
    // Will be rejected if JSON is invalid.
    return response.json();
  }

  async validateCollectionSignature(remote, payload, collection, options = {}) {
    const {ignoreLocal} = options;

    // This is a content-signature field from an autograph response.
    const {x5u, signature} = await fetchCollectionMetadata(remote, collection);
    const certChainResponse = await fetch(x5u);
    const certChain = await certChainResponse.text();

    const verifier = Cc["@mozilla.org/security/contentsignatureverifier;1"]
                       .createInstance(Ci.nsIContentSignatureVerifier);

    let toSerialize;
    if (ignoreLocal) {
      toSerialize = {
        last_modified: `${payload.last_modified}`,
        data: payload.data
      };
    } else {
      // The payload from the "incoming-changes" hook carries `lastModified`
      // and the list of incoming `changes`.
      const {data: localRecords} = await collection.list();
      const records = mergeChanges(collection, localRecords, payload.changes);
      toSerialize = {
        last_modified: `${payload.lastModified}`,
        data: records
      };
    }

    const serialized = CanonicalJSON.stringify(toSerialize);

    if (verifier.verifyContentSignature(serialized, "p384ecdsa=" + signature,
                                        certChain,
                                        this.signerName)) {
      // The signature is valid; return the payload so the changes get applied locally.
      return payload;
    }
    throw new Error(INVALID_SIGNATURE);
  }

  /**
   * Synchronize from Kinto server, if necessary.
   *
   * @param {int} lastModified the lastModified date (on the server) for
   *                           the remote collection.
   * @param {Date} serverTime the current date returned by the server.
   * @param {Object} options additional advanced options.
   * @param {bool} options.loadDump load initial dump from disk on first sync (default: true)
   * @return {Promise} which rejects on sync or process failure.
   */
  async maybeSync(lastModified, serverTime, options = {loadDump: true}) {
    const {loadDump} = options;
    const remote = Services.prefs.getCharPref(PREF_SETTINGS_SERVER);
    const enforceCollectionSigning =
      Services.prefs.getBoolPref(PREF_BLOCKLIST_ENFORCE_SIGNING);

    // if there is a signerName and collection signing is enforced, add a
    // hook for incoming changes that validates the signature
    const colOptions = {};
    if (this.signerName && enforceCollectionSigning) {
      colOptions.hooks = {
        "incoming-changes": [(payload, collection) => {
          return this.validateCollectionSignature(remote, payload, collection);
        }]
      };
    }

    let reportStatus = null;
    try {
      return await this.openCollection(async (collection) => {
        // Synchronize remote data into a local SQLite DB.
        let collectionLastModified = await collection.db.getLastModified();

        // If there is no data currently in the collection, attempt to import
        // initial data from the application defaults.
        // This allows us to avoid synchronizing the whole collection content
        // on cold start.
        if (!collectionLastModified && loadDump) {
          try {
            const initialData = await this.loadDumpFile();
            await collection.loadDump(initialData.data);
            collectionLastModified = await collection.db.getLastModified();
          } catch (e) {
            // Report but go on.
            Cu.reportError(e);
          }
        }

        // If the data is up to date, there's no need to sync. We still need
        // to record the fact that a check happened.
        if (lastModified <= collectionLastModified) {
          this.updateLastCheck(serverTime);
          reportStatus = UptakeTelemetry.STATUS.UP_TO_DATE;
          return;
        }

        // Fetch changes from server.
        try {
          // Server changes have priority during synchronization.
          const strategy = Kinto.syncStrategy.SERVER_WINS;
          const {ok} = await collection.sync({remote, strategy});
          if (!ok) {
            // Some synchronization conflicts occurred.
            reportStatus = UptakeTelemetry.STATUS.CONFLICT_ERROR;
            throw new Error("Sync failed");
          }
        } catch (e) {
          if (e.message == INVALID_SIGNATURE) {
            // Signature verification failed during synchronization.
            reportStatus = UptakeTelemetry.STATUS.SIGNATURE_ERROR;
            // If the sync fails with a signature error, it's likely that our
            // local data has been modified in some way.
            // We will attempt to fix this by retrieving the whole
            // remote collection.
            const payload = await fetchRemoteCollection(remote, collection);
            try {
              await this.validateCollectionSignature(remote, payload, collection, {ignoreLocal: true});
            } catch (e) {
              reportStatus = UptakeTelemetry.STATUS.SIGNATURE_RETRY_ERROR;
              throw e;
            }
            // If the signature is good (we haven't thrown), and the remote
            // last_modified is newer than or equal to the local last_modified,
            // replace the local data.
            const localLastModified = await collection.db.getLastModified();
            if (payload.last_modified >= localLastModified) {
              await collection.clear();
              await collection.loadDump(payload.data);
            }
          } else {
            // The sync has thrown; it can be a network or a general error.
            if (/NetworkError/.test(e.message)) {
              reportStatus = UptakeTelemetry.STATUS.NETWORK_ERROR;
            } else if (/Backoff/.test(e.message)) {
              reportStatus = UptakeTelemetry.STATUS.BACKOFF;
            } else {
              reportStatus = UptakeTelemetry.STATUS.SYNC_ERROR;
            }
            throw e;
          }
        }

        // Read local collection of records.
        const {data} = await collection.list();

        // Handle the obtained records (i.e. apply locally).
        try {
          await this.processCallback(data);
        } catch (e) {
          reportStatus = UptakeTelemetry.STATUS.APPLY_ERROR;
          throw e;
        }

        // Track last update.
        this.updateLastCheck(serverTime);

      }, colOptions);
    } catch (e) {
      // No specific error was tracked, mark it as unknown.
      if (reportStatus === null) {
        reportStatus = UptakeTelemetry.STATUS.UNKNOWN_ERROR;
      }
      throw e;
    } finally {
      // No error was reported, this is a success!
      if (reportStatus === null) {
        reportStatus = UptakeTelemetry.STATUS.SUCCESS;
      }
      // Report success/error status to Telemetry.
      UptakeTelemetry.report(this.identifier, reportStatus);
    }
  }

  /**
   * Save the last time the server was checked in the user's prefs.
   *
   * @param {Date} serverTime the current date returned by the server.
   */
  updateLastCheck(serverTime) {
    const checkedServerTimeInSeconds = Math.round(serverTime / 1000);
    Services.prefs.setIntPref(this.lastCheckTimePref, checkedServerTimeInSeconds);
  }
}

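// Illustrative usage (not part of the module): a caller such as the blocklist
// updater is expected to invoke a client with the server's change information,
// roughly:
//
//   await OneCRLBlocklistClient.maybeSync(lastModified, serverTime);
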
/**
 * Revoke the appropriate certificates based on the records from the blocklist.
 *
 * @param {Object} records current records in the local db.
 */
async function updateCertBlocklist(records) {
  const certList = Cc["@mozilla.org/security/certblocklist;1"]
                     .getService(Ci.nsICertBlocklist);
  for (let item of records) {
    try {
      if (item.issuerName && item.serialNumber) {
        certList.revokeCertByIssuerAndSerial(item.issuerName,
                                             item.serialNumber);
      } else if (item.subject && item.pubKeyHash) {
        certList.revokeCertBySubjectAndPubKey(item.subject,
                                              item.pubKeyHash);
      }
    } catch (e) {
      // prevent errors relating to individual blocklist entries from
      // causing sync to fail. We will accumulate telemetry on these failures in
      // bug 1254099.
      Cu.reportError(e);
    }
  }
  certList.saveEntries();
}

/**
 * Modify the appropriate security pins based on records from the remote
 * collection.
 *
 * @param {Object} records current records in the local db.
 */
async function updatePinningList(records) {
  if (!Services.prefs.getBoolPref(PREF_BLOCKLIST_PINNING_ENABLED)) {
    return;
  }

  const siteSecurityService = Cc["@mozilla.org/ssservice;1"]
                                .getService(Ci.nsISiteSecurityService);

  // clear the current preload list
  siteSecurityService.clearPreloads();

  // write each KeyPin and STSPin entry to the preload list
  for (let item of records) {
    try {
      const {pinType, pins = [], versions} = item;
      if (versions.indexOf(Services.appinfo.version) != -1) {
        if (pinType == "KeyPin" && pins.length) {
          siteSecurityService.setKeyPins(item.hostName,
                                         item.includeSubdomains,
                                         item.expires,
                                         pins.length,
                                         pins, true);
        }
        if (pinType == "STSPin") {
          siteSecurityService.setHSTSPreload(item.hostName,
                                             item.includeSubdomains,
                                             item.expires);
        }
      }
    } catch (e) {
      // prevent errors relating to individual preload entries from causing
      // sync to fail. We will accumulate telemetry for such failures in bug
      // 1254099.
    }
  }
}

/**
 * Write the list of records into a JSON file, and notify nsBlocklistService.
 *
 * @param {String} filename path relative to the profile dir.
 * @param {Object} records current records in the local db.
 */
async function updateJSONBlocklist(filename, records) {
  // Write JSON dump for synchronous load at startup.
  const path = OS.Path.join(OS.Constants.Path.profileDir, filename);
  const blocklistFolder = OS.Path.dirname(path);

  await OS.File.makeDir(blocklistFolder, {from: OS.Constants.Path.profileDir});

  const serialized = JSON.stringify({data: records}, null, 2);
  try {
    await OS.File.writeAtomic(path, serialized, {tmpPath: path + ".tmp"});
    // Notify change to `nsBlocklistService`
    const eventData = {filename};
    Services.cpmm.sendAsyncMessage("Blocklist:reload-from-disk", eventData);
  } catch (e) {
    Cu.reportError(e);
  }
}

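// Each client below binds a remote collection (and its "checked" pref) to the
// callback that applies its records locally: certificate revocations, pin
// preloads, or JSON dumps consumed by the blocklist service.
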
this.OneCRLBlocklistClient = new BlocklistClient(
  Services.prefs.getCharPref(PREF_BLOCKLIST_ONECRL_COLLECTION),
  PREF_BLOCKLIST_ONECRL_CHECKED_SECONDS,
  updateCertBlocklist,
  Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET),
  "onecrl.content-signature.mozilla.org"
);

this.AddonBlocklistClient = new BlocklistClient(
  Services.prefs.getCharPref(PREF_BLOCKLIST_ADDONS_COLLECTION),
  PREF_BLOCKLIST_ADDONS_CHECKED_SECONDS,
  (records) => updateJSONBlocklist(this.AddonBlocklistClient.filename, records),
  Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET)
);

this.GfxBlocklistClient = new BlocklistClient(
  Services.prefs.getCharPref(PREF_BLOCKLIST_GFX_COLLECTION),
  PREF_BLOCKLIST_GFX_CHECKED_SECONDS,
  (records) => updateJSONBlocklist(this.GfxBlocklistClient.filename, records),
  Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET)
);

this.PluginBlocklistClient = new BlocklistClient(
  Services.prefs.getCharPref(PREF_BLOCKLIST_PLUGINS_COLLECTION),
  PREF_BLOCKLIST_PLUGINS_CHECKED_SECONDS,
  (records) => updateJSONBlocklist(this.PluginBlocklistClient.filename, records),
  Services.prefs.getCharPref(PREF_BLOCKLIST_BUCKET)
);

this.PinningPreloadClient = new BlocklistClient(
  Services.prefs.getCharPref(PREF_BLOCKLIST_PINNING_COLLECTION),
  PREF_BLOCKLIST_PINNING_CHECKED_SECONDS,
  updatePinningList,
  Services.prefs.getCharPref(PREF_BLOCKLIST_PINNING_BUCKET),
  "pinning-preload.content-signature.mozilla.org"
);
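
// Illustrative usage (not part of the module): consumers typically import the
// clients by module URL, assuming this file is registered under
// resource://services-common/, e.g.
//
//   const { OneCRLBlocklistClient } =
//     ChromeUtils.import("resource://services-common/blocklist-clients.js", {});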