Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1639284 - Store records, timestamp and metadata in one IndexedDB transaction r=Gijs,mixedpuppy
Differential Revision: https://phabricator.services.mozilla.com/D78246
Parent: 21e6b1735a
Commit: 07af99c7f1
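
In short, writes that previously needed separate calls (and therefore separate IndexedDB transactions) for records, timestamp, and metadata are folded into one `importChanges()` call. A minimal sketch of the pattern, using names that appear in the tests updated below (`client`, `record`, and `metadata` are assumed to be set up by the caller):

    // Before this patch: one IndexedDB transaction per call.
    //   await client.db.clear();
    //   await client.db.create(record);
    //   await client.db.saveLastModified(42);
    //   await client.db.saveMetadata(metadata);
    // After this patch: records, timestamp and metadata in one transaction.
    await client.db.importChanges(metadata, 42, [record], { clear: true });
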
@@ -88,7 +88,7 @@ add_task(async function setup_head() {
       useRecordId: true,
     });
   }
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42);
   if (EXPECTED_BREACH) {
     await RemoteSettings(LoginBreaches.REMOTE_SETTINGS_COLLECTION).emit(
       "sync",

@@ -282,8 +282,7 @@ add_task(async function test_setBreachesFromRemoteSettingsSync() {
   );
   gBrowserGlue.observe(null, "browser-glue-test", "add-breaches-sync-handler");
   const db = await RemoteSettings(LoginBreaches.REMOTE_SETTINGS_COLLECTION).db;
-  await db.create(nowExampleIsInBreachedRecords[0]);
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42, [nowExampleIsInBreachedRecords[0]]);
   await emitSync();

   const breachesByLoginGUID = await LoginBreaches.getPotentialBreachesByLoginGUID(

@@ -66,7 +66,7 @@ add_task(async function test_main_flow() {
       1}-${AddedDate.getDate()}`,
     PwnCount: 1000000,
   });
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);

   // Trigger a sync.
   await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -103,7 +103,7 @@ add_task(async function test_main_flow() {

   // Reset state.
   await db.clear();
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);
   await clearWarnedHosts();
   await SpecialPowers.pushPrefEnv({
     clear: [["extensions.fxmonitor.firstAlertShown"]],
@@ -133,7 +133,7 @@ add_task(async function test_main_flow() {

   // Reset state (but not firstAlertShown).
   await db.clear();
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);
   await clearWarnedHosts();

   info(
@@ -151,7 +151,7 @@ add_task(async function test_main_flow() {
       1}-${AddedDate.getDate()}`,
     PwnCount: 1000000,
   });
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);

   // Trigger a sync.
   await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -169,7 +169,7 @@ add_task(async function test_main_flow() {
   // Reset state (but not firstAlertShown).
   AddedDate.setMonth(AddedDate.getMonth() + 3);
   await db.clear();
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);
   await clearWarnedHosts();

   info("Test that we do show the second alert for a recent breach.");
@@ -184,7 +184,7 @@ add_task(async function test_main_flow() {
       1}-${AddedDate.getDate()}`,
     PwnCount: 1000000,
   });
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);

   // Trigger a sync.
   await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -201,7 +201,7 @@ add_task(async function test_main_flow() {

   // Reset state (including firstAlertShown)
   await db.clear();
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);
   await clearWarnedHosts();
   await SpecialPowers.pushPrefEnv({
     clear: [["extensions.fxmonitor.firstAlertShown"]],
@@ -222,7 +222,7 @@ add_task(async function test_main_flow() {
       1}-${AddedDate.getDate()}`,
     PwnCount: 1000000,
   });
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);

   // Trigger a sync.
   await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -240,7 +240,7 @@ add_task(async function test_main_flow() {
   // Clean up.
   BrowserTestUtils.removeTab(tab);
   await db.clear();
-  await db.saveLastModified(1234567);
+  await db.importChanges({}, 1234567);
   // Trigger a sync to clear.
   await RemoteSettings(kRemoteSettingsKey).emit("sync", {
     data: {

@@ -135,13 +135,15 @@ add_task(async function test_loading_experimentsAPI() {
     ],
   });
   const client = RemoteSettings("messaging-experiments");
-  await client.db.clear();
-  await client.db.create(
-    // Modify targeting to ensure the messages always show up
-    { ...EXPERIMENT_PAYLOAD }
+  await client.db.importChanges(
+    {},
+    42,
+    [
+      // Modify targeting to ensure the messages always show up
+      { ...EXPERIMENT_PAYLOAD },
+    ],
+    { clear: true }
   );
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
-
   // Fetch the new recipe from RS
   await RemoteSettingsExperimentLoader.updateRecipes();
   await BrowserTestUtils.waitForCondition(

@@ -35,9 +35,9 @@ add_task(async function setup() {
     id: `HEARTBEAT_MESSAGE_${Date.now()}`,
   };
   const client = RemoteSettings("cfr");
-  await client.db.clear();
-  await client.db.create(testMessage);
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges({}, 42, [testMessage], {
+    clear: true,
+  });

   // Reload the providers
   await BrowserTestUtils.waitForCondition(async () => {
@@ -93,9 +93,9 @@ add_task(async function test_heartbeat_tactic_2() {
     frequency: { lifetime: 2 },
   };
   const client = RemoteSettings("message-groups");
-  await client.db.clear();
-  await client.db.create(groupConfiguration);
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges({}, 42, [groupConfiguration], {
+    clear: true,
+  });

   // Reload the providers
   await ASRouter._updateMessageProviders();

@@ -35,9 +35,7 @@ add_task(async function setup() {
     id: `HEARTBEAT_MESSAGE_${Date.now()}`,
   };
   const client = RemoteSettings("cfr");
-  await client.db.clear();
-  await client.db.create(testMessage);
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges({}, 42, [testMessage], { clear: true });

   // Reload the providers
   await BrowserTestUtils.waitForCondition(async () => {
@@ -89,9 +87,7 @@ add_task(async function test_heartbeat_tactic_2() {
     userPreferences: ["browser.userPreference.messaging-experiments"],
   };
   const client = RemoteSettings("message-groups");
-  await client.db.clear();
-  await client.db.create(groupConfiguration);
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges({}, 42, [groupConfiguration], { clear: true });

   // Reload the providers
   await ASRouter._updateMessageProviders();

@@ -28,16 +28,20 @@ add_task(async function test_with_rs_messages() {
   );
   const initialMessageCount = ASRouter.state.messages.length;
   const client = RemoteSettings("cfr");
-  await client.db.clear();
-  await client.db.create({
-    // Modify targeting and randomize message name to work around the message
-    // getting blocked (for --verify)
-    ...msg,
-    id: `MOMENTS_MOCHITEST_${Date.now()}`,
-    targeting: "true",
-  });
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
-
+  await client.db.importChanges(
+    {},
+    42,
+    [
+      {
+        // Modify targeting and randomize message name to work around the message
+        // getting blocked (for --verify)
+        ...msg,
+        id: `MOMENTS_MOCHITEST_${Date.now()}`,
+        targeting: "true",
+      },
+    ],
+    { clear: true }
+  );
   // Reload the provider
   await ASRouter._updateMessageProviders();
   // Wait to load the WNPanel messages
@@ -67,8 +71,7 @@ add_task(async function test_with_rs_messages() {
       id: `MOMENTS_MOCHITEST_${Date.now()}`,
       priority: 2,
       targeting: "true",
-    },
-    { useRecordId: true }
+    }
   );

   // Reset so we can `await` for the pref value to be set again

@@ -26,14 +26,15 @@ add_task(async function test_with_rs_messages() {
   );
   const initialMessageCount = ASRouter.state.messages.length;
   const client = RemoteSettings("whats-new-panel");
-  await client.db.clear();
-  for (const record of msgs) {
-    await client.db.create(
+  await client.db.importChanges(
+    {},
+    42,
+    [
       // Modify targeting to ensure the messages always show up
-      { ...record, targeting: "true" }
-    );
-  }
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+      { ...record, targeting: "true" },
+    ],
+    { clear: true }
+  );

   const whatsNewBtn = document.getElementById("appMenu-whatsnew-button");
   Assert.equal(whatsNewBtn.hidden, true, "What's New btn doesn't exist");

@@ -428,7 +428,9 @@ class IntermediatePreloads {
     }
     const toReset = current.filter(record => record.cert_import_complete);
     try {
-      await this.client.db.importBulk(
+      await this.client.db.importChanges(
+        undefined, // do not touch metadata.
+        undefined, // do not touch collection timestamp.
         toReset.map(r => ({ ...r, cert_import_complete: false }))
       );
     } catch (err) {
@@ -495,7 +497,9 @@ class IntermediatePreloads {
       return;
     }
     try {
-      await this.client.db.importBulk(
+      await this.client.db.importChanges(
+        undefined, // do not touch metadata.
+        undefined, // do not touch collection timestamp.
         recordsToUpdate.map(r => ({ ...r, cert_import_complete: true }))
       );
     } catch (err) {
@@ -521,6 +525,7 @@ class IntermediatePreloads {
     const finalWaiting = finalCurrent.filter(
       record => !record.cert_import_complete
     );

+    const countPreloaded = finalCurrent.length - finalWaiting.length;

     TelemetryStopwatch.finish(INTERMEDIATES_UPDATE_MS_TELEMETRY);

@@ -435,7 +435,7 @@ And records can be created manually (as if they were synchronized from the server
       domain: "website.com",
       usernameSelector: "#login-account",
       passwordSelector: "#pass-signin",
-    }, { synced: true });
+    });

If no timestamp is set, any call to ``.get()`` will trigger the load of initial data (JSON dump) if any, or a synchronization will be triggered. To avoid that, store a fake timestamp:

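The snippet the documentation refers to is not part of this hunk; a sketch of it, following the fake-timestamp pattern used throughout the tests in this commit (the `42` and the `"a-key"` collection name are arbitrary placeholders):

    const db = await RemoteSettings("a-key").db;
    // A fake timestamp prevents .get() from loading the packaged JSON dump
    // or triggering a synchronization.
    await db.importChanges({}, 42);
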
@@ -10,9 +10,11 @@ const { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm");
 XPCOMUtils.defineLazyModuleGetters(this, {
   AsyncShutdown: "resource://gre/modules/AsyncShutdown.jsm",
   IDBHelpers: "resource://services-settings/IDBHelpers.jsm",
   Utils: "resource://services-settings/Utils.jsm",
+  CommonUtils: "resource://services-common/utils.js",
+  ObjectUtils: "resource://gre/modules/ObjectUtils.jsm",
 });
 XPCOMUtils.defineLazyGetter(this, "console", () => Utils.log);

 var EXPORTED_SYMBOLS = ["Database"];

@@ -74,51 +76,87 @@ class Database {
     return sort ? sortObjects(sort, results) : results;
   }

-  async importBulk(toInsert) {
+  async importChanges(metadata, timestamp, records = [], options = {}) {
+    const { clear = false } = options;
     const _cid = this.identifier;
     try {
       await executeIDB(
-        "records",
-        (store, rejectTransaction) => {
-          IDBHelpers.bulkOperationHelper(
-            store,
-            {
-              reject: rejectTransaction,
-            },
-            "put",
-            toInsert.map(item => {
-              return Object.assign({ _cid }, item);
-            })
-          );
-        },
-        { desc: "importBulk() in " + this.identifier }
-      );
-    } catch (e) {
-      throw new IDBHelpers.IndexedDBError(e, "importBulk()", this.identifier);
-    }
-  }
+        ["collections", "timestamps", "records"],
+        (stores, rejectTransaction) => {
+          const [storeMetadata, storeTimestamps, storeRecords] = stores;

-  async deleteBulk(toDelete) {
-    const _cid = this.identifier;
-    try {
-      await executeIDB(
-        "records",
-        (store, rejectTransaction) => {
+          if (clear) {
+            // Our index is over the _cid and id fields. We want to remove
+            // all of the items in the collection for which the object was
+            // created, ie with _cid == this.identifier.
+            // We would like to just tell IndexedDB:
+            // store.index(IDBKeyRange.only(this.identifier)).delete();
+            // to delete all records matching the first part of the 2-part key.
+            // Unfortunately such an API does not exist.
+            // While we could iterate over the index with a cursor, we'd do
+            // a roundtrip to PBackground for each item. Once you have 1000
+            // items, the result is very slow because of all the overhead of
+            // jumping between threads and serializing/deserializing.
+            // So instead, we tell the store to delete everything between
+            // "our" _cid identifier, and what would be the next identifier
+            // (via lexicographical sorting). Unfortunately there does not
+            // seem to be a way to specify bounds for all items that share
+            // the same first part of the key using just that first part, hence
+            // the use of the hypothetical [] for the second part of the end of
+            // the bounds.
+            storeRecords.delete(
+              IDBKeyRange.bound([_cid], [_cid, []], false, true)
+            );
+          }
+
+          // Store or erase metadata.
+          if (metadata === null) {
+            storeMetadata.delete(_cid);
+          } else if (metadata) {
+            storeMetadata.put({ cid: _cid, metadata });
+          }
+          // Store or erase timestamp.
+          if (timestamp === null) {
+            storeTimestamps.delete(_cid);
+          } else if (timestamp) {
+            storeTimestamps.put({ cid: _cid, value: timestamp });
+          }
+
+          if (records.length == 0) {
+            return;
+          }
+
+          // Separate tombstones from creations/updates.
+          const toDelete = records.filter(r => r.deleted);
+          const toInsert = records.filter(r => !r.deleted);
+          console.debug(
+            `${_cid} ${toDelete.length} to delete, ${toInsert.length} to insert`
+          );
+          // Delete local records for each tombstone.
           IDBHelpers.bulkOperationHelper(
-            store,
+            storeRecords,
             {
               reject: rejectTransaction,
+              completion() {
+                // Overwrite all other data.
+                IDBHelpers.bulkOperationHelper(
+                  storeRecords,
+                  {
+                    reject: rejectTransaction,
+                  },
+                  "put",
+                  toInsert.map(item => ({ ...item, _cid }))
+                );
+              },
             },
             "delete",
-            toDelete.map(item => {
-              return [_cid, item.id];
-            })
+            toDelete.map(item => [_cid, item.id])
           );
         },
-        { desc: "deleteBulk() in " + this.identifier }
+        { desc: "importChanges() in " + _cid }
       );
     } catch (e) {
-      throw new IDBHelpers.IndexedDBError(e, "deleteBulk()", this.identifier);
+      throw new IDBHelpers.IndexedDBError(e, "importChanges()", _cid);
     }
   }

@@ -142,30 +180,6 @@ class Database {
     return entry ? entry.value : null;
   }

-  async saveLastModified(lastModified) {
-    const value = parseInt(lastModified, 10) || null;
-    try {
-      await executeIDB(
-        "timestamps",
-        store => {
-          if (value === null) {
-            store.delete(this.identifier);
-          } else {
-            store.put({ cid: this.identifier, value });
-          }
-        },
-        { desc: "saveLastModified() in " + this.identifier }
-      );
-    } catch (e) {
-      throw new IDBHelpers.IndexedDBError(
-        e,
-        "saveLastModified()",
-        this.identifier
-      );
-    }
-    return value;
-  }
-
   async getMetadata() {
     let entry = null;
     try {
@@ -182,25 +196,6 @@ class Database {
     return entry ? entry.metadata : null;
   }

-  async saveMetadata(metadata) {
-    try {
-      await executeIDB(
-        "collections",
-        store => {
-          if (metadata === null) {
-            store.delete(this.identifier);
-          } else {
-            store.put({ cid: this.identifier, metadata });
-          }
-        },
-        { desc: "saveMetadata() in " + this.identifier }
-      );
-      return metadata;
-    } catch (e) {
-      throw new IDBHelpers.IndexedDBError(e, "saveMetadata()", this.identifier);
-    }
-  }
-
   async getAttachment(attachmentId) {
     let entry = null;
     try {
@@ -247,40 +242,7 @@ class Database {

   async clear() {
     try {
-      await this.saveLastModified(null);
-      await this.saveMetadata(null);
-      await executeIDB(
-        "records",
-        store => {
-          // Our index is over the _cid and id fields. We want to remove
-          // all of the items in the collection for which the object was
-          // created, ie with _cid == this.identifier.
-          // We would like to just tell IndexedDB:
-          // store.index(IDBKeyRange.only(this.identifier)).delete();
-          // to delete all records matching the first part of the 2-part key.
-          // Unfortunately such an API does not exist.
-          // While we could iterate over the index with a cursor, we'd do
-          // a roundtrip to PBackground for each item. Once you have 1000
-          // items, the result is very slow because of all the overhead of
-          // jumping between threads and serializing/deserializing.
-          // So instead, we tell the store to delete everything between
-          // "our" _cid identifier, and what would be the next identifier
-          // (via lexicographical sorting). Unfortunately there does not
-          // seem to be a way to specify bounds for all items that share
-          // the same first part of the key using just that first part, hence
-          // the use of the hypothetical [] for the second part of the end of
-          // the bounds.
-          return store.delete(
-            IDBKeyRange.bound(
-              [this.identifier],
-              [this.identifier, []],
-              false,
-              true
-            )
-          );
-        },
-        { desc: "clear() in " + this.identifier }
-      );
+      await this.importChanges(null, null, [], { clear: true });
     } catch (e) {
       throw new IDBHelpers.IndexedDBError(e, "clear()", this.identifier);
     }
@@ -366,13 +328,13 @@ const gPendingWriteOperations = new Set();
  * Helper to wrap some IDBObjectStore operations into a promise.
  *
  * @param {IDBDatabase} db
- * @param {String} storeName
+ * @param {String|String[]} storeNames - either a string or an array of strings.
  * @param {function} callback
  * @param {Object} options
  * @param {String} options.mode
  * @param {String} options.desc for shutdown tracking.
  */
-async function executeIDB(storeName, callback, options = {}) {
+async function executeIDB(storeNames, callback, options = {}) {
   if (!gDB) {
     // Check if we're shutting down. Services.startup.shuttingDown will
     // be true sooner, but is never true in xpcshell tests, so we check
@@ -403,7 +365,7 @@ async function executeIDB(storeName, callback, options = {}) {
   const { mode = "readwrite", desc = "" } = options;
   let { promise, transaction } = IDBHelpers.executeIDB(
     gDB,
-    storeName,
+    storeNames,
     mode,
     callback,
     desc

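The comment inside `importChanges()` above explains why wiping one collection uses a key-range delete instead of a cursor. Sketched in isolation, with names as in the patch (`_cid` is the collection identifier, record keys are `[_cid, id]`):

    // In IndexedDB key ordering, an array sorts after any string, so
    // [_cid, []] is an upper bound past every [_cid, <id>] key.
    // lowerOpen = false and upperOpen = true keep the range inside _cid.
    storeRecords.delete(IDBKeyRange.bound([_cid], [_cid, []], false, true));
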
@@ -362,9 +362,9 @@ class RemoteSettingsClient extends EventEmitter {
       try {
         await this._importingPromise;
       } catch (e) {
-        // Report but return an empty list since there will be no data anyway.
+        // Report error, but continue because there could have been data
+        // loaded from a parrallel call.
         Cu.reportError(e);
-        return [];
       } finally {
         // then delete this promise again, as now we should have local data:
         delete this._importingPromise;
@@ -520,7 +520,7 @@ class RemoteSettingsClient extends EventEmitter {
       const metadata = await this.httpClient().getData({
         query: { _expected: expectedTimestamp },
       });
-      await this.db.saveMetadata(metadata);
+      await this.db.importChanges(metadata);
       // We don't bother validating the signature if the dump was just loaded. We do
       // if the dump was loaded at some other point (eg. from .get()).
       if (this.verifySignature && importedFromDump.length == 0) {
@@ -749,6 +749,11 @@ class RemoteSettingsClient extends EventEmitter {
         "duration"
       );
     }
+    if (result < 0) {
+      console.debug(`${this.identifier} no dump available`);
+    } else {
+      console.info(`${this.identifier} imported ${result} records from dump`);
+    }
     return result;
   }

@@ -856,20 +861,10 @@ class RemoteSettingsClient extends EventEmitter {
       return syncResult;
     }

-    // Separate tombstones from creations/updates.
-    const toDelete = remoteRecords.filter(r => r.deleted);
-    const toInsert = remoteRecords.filter(r => !r.deleted);
-    console.debug(
-      `${this.identifier} ${toDelete.length} to delete, ${toInsert.length} to insert`
-    );
-
     const start = Cu.now() * 1000;
-    // Delete local records for each tombstone.
-    await this.db.deleteBulk(toDelete);
-    // Overwrite all other data.
-    await this.db.importBulk(toInsert);
-    await this.db.saveLastModified(remoteTimestamp);
-    await this.db.saveMetadata(metadata);
+    await this.db.importChanges(metadata, remoteTimestamp, remoteRecords, {
+      clear: retry,
+    });
     if (gTimingEnabled) {
       const end = Cu.now() * 1000;
       PerformanceCounters.storeExecutionTime(
@@ -921,12 +916,11 @@ class RemoteSettingsClient extends EventEmitter {
         console.debug(`${this.identifier} previous data was invalid`);
       }

-      // Signature failed, clear local DB because it contains
-      // bad data (local + remote changes).
-      console.debug(`${this.identifier} clear local data`);
-      await this.db.clear();
-
       if (!localTrustworthy && !retry) {
+        // Signature failed, clear local DB because it contains
+        // bad data (local + remote changes).
+        console.debug(`${this.identifier} clear local data`);
+        await this.db.clear();
         // Local data was tampered, throw and it will retry from empty DB.
         console.error(`${this.identifier} local data was corrupted`);
         throw new CorruptedDataError(this.identifier);
@@ -934,16 +928,22 @@ class RemoteSettingsClient extends EventEmitter {
         // We retried already, we will restore the previous local data
         // before throwing eventually.
         if (localTrustworthy) {
-          // Signature of data before importing changes was good.
-          console.debug(
-            `${this.identifier} Restore previous data (timestamp=${localTimestamp})`
+          await this.db.importChanges(
+            localMetadata,
+            localTimestamp,
+            localRecords,
+            {
+              clear: true, // clear before importing.
+            }
           );
-          await this.db.importBulk(localRecords);
-          await this.db.saveLastModified(localTimestamp);
-          await this.db.saveMetadata(localMetadata);
         } else {
           // Restore the dump if available (no-op if no dump)
-          await this._importJSONDump();
+          const imported = await this._importJSONDump();
+          // _importJSONDump() only clears DB if dump is available,
+          // therefore do it here!
+          if (imported < 0) {
+            await this.db.clear();
+          }
         }
       }
       throw e;

@@ -199,6 +199,8 @@ async function importDumpIDB(bucket, collection, records) {
     [IDB_RECORDS_STORE, IDB_TIMESTAMPS_STORE],
     "readwrite",
     ([recordsStore, timestampStore], rejectTransaction) => {
+      // Wipe before loading
+      recordsStore.delete(IDBKeyRange.bound([cid], [cid, []], false, true));
       IDBHelpers.bulkOperationHelper(
         recordsStore,
         {

@@ -102,6 +102,12 @@ add_task(async function test_records_obtained_from_server_are_stored_in_db() {
   // Our test data has a single record; it should be in the local collection
   const list = await client.get();
   equal(list.length, 1);
+
+  const timestamp = await client.db.getLastModified();
+  equal(timestamp, 3000, "timestamp was stored");
+
+  const { signature } = await client.db.getMetadata();
+  equal(signature.signature, "abcdef", "metadata was stored");
 });
 add_task(clear_state);

@@ -217,13 +223,10 @@ add_task(clear_state);
 add_task(
   async function test_records_changes_are_overwritten_by_server_changes() {
     // Create some local conflicting data, and make sure it syncs without error.
-    await client.db.create(
-      {
-        website: "",
-        id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
-      },
-      { useRecordId: true }
-    );
+    await client.db.create({
+      website: "",
+      id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
+    });

     await client.maybeSync(2000);

@@ -714,7 +717,7 @@ add_task(async function test_telemetry_reports_if_application_fails() {
 add_task(clear_state);

 add_task(async function test_telemetry_reports_if_sync_fails() {
-  await client.db.saveLastModified(9999);
+  await client.db.importChanges({}, 9999);

   const startHistogram = getUptakeTelemetrySnapshot(client.identifier);

@@ -729,7 +732,7 @@ add_task(async function test_telemetry_reports_if_sync_fails() {
 add_task(clear_state);

 add_task(async function test_telemetry_reports_if_parsing_fails() {
-  await client.db.saveLastModified(10000);
+  await client.db.importChanges({}, 10000);

   const startHistogram = getUptakeTelemetrySnapshot(client.identifier);

@@ -744,7 +747,7 @@ add_task(async function test_telemetry_reports_if_parsing_fails() {
 add_task(clear_state);

 add_task(async function test_telemetry_reports_if_fetching_signature_fails() {
-  await client.db.saveLastModified(11000);
+  await client.db.importChanges({}, 11000);

   const startHistogram = getUptakeTelemetrySnapshot(client.identifier);

@@ -6,11 +6,17 @@ const { RemoteSettings } = ChromeUtils.import(
 let client;

 async function createRecords(records) {
-  await client.db.clear();
-  for (const record of records) {
-    await client.db.create(record);
-  }
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges(
+    {},
+    42,
+    records.map((record, i) => ({
+      id: `record-${i}`,
+      ...record,
+    })),
+    {
+      clear: true,
+    }
+  );
 }

 function run_test() {
@@ -38,10 +44,12 @@ add_task(async function test_returns_all_without_target() {

 add_task(async function test_filters_can_be_disabled() {
   const c = RemoteSettings("no-jexl", { filterFunc: null });
-  await c.db.create({
-    filter_expression: "1 == 2",
-  });
-  await c.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await c.db.importChanges({}, 42, [
+    {
+      id: "abc",
+      filter_expression: "1 == 2",
+    },
+  ]);

   const list = await c.get();
   equal(list.length, 1);

@@ -942,10 +942,10 @@ add_task(async function test_syncs_clients_with_local_database() {
   // since we want to test «unknown» clients that have a local database.
   new RemoteSettingsClient("addons", {
     bucketNamePref: "services.blocklist.bucket", // bucketName = "blocklists"
-  }).db.saveLastModified(42);
+  }).db.importChanges({}, 42);
   new RemoteSettingsClient("recipes", {
     bucketNamePref: "services.settings.default_bucket", // bucketName = "main"
-  }).db.saveLastModified(43);
+  }).db.importChanges({}, 43);

   let error;
   try {

@@ -545,7 +545,7 @@ add_task(async function test_check_synchronization_with_signatures() {
   };

   // ensure our collection hasn't been replaced with an older, empty one
-  equal((await client.get()).length, 2);
+  equal((await client.get()).length, 2, "collection was restored");

   registerHandlers(badSigGoodOldResponses);

@@ -558,7 +558,7 @@ add_task(async function test_check_synchronization_with_signatures() {

   // Local data was unchanged, since it was never than the one returned by the server,
   // thus the sync event is not sent.
-  equal(syncEventSent, false);
+  equal(syncEventSent, false, "event was not sent");

   //
   // 7.
@@ -595,14 +595,18 @@ add_task(async function test_check_synchronization_with_signatures() {
   // properly contains created, updated, and deleted records.
   // the local DB contains same id as RECORD2 and a fake record.
   // the final server collection contains RECORD2 and RECORD3
-  await client.db.clear();
-  await client.db.saveMetadata({ signature: { x5u, signature: "abc" } });
-  await client.db.create(
-    { ...RECORD2, last_modified: 1234567890, serialNumber: "abc" },
-    { synced: true, useRecordId: true }
-  );
   const localId = "0602b1b2-12ab-4d3a-b6fb-593244e7b035";
-  await client.db.create({ id: localId }, { synced: true, useRecordId: true });
+  await client.db.importChanges(
+    { signature: { x5u, signature: "abc" } },
+    null,
+    [
+      { ...RECORD2, last_modified: 1234567890, serialNumber: "abc" },
+      { id: localId },
+    ],
+    {
+      clear: true,
+    }
+  );

   let syncData = null;
   client.on("sync", ({ data }) => {
@@ -787,15 +791,21 @@ add_task(async function test_check_synchronization_with_signatures() {
       ][i++];
     },
   };
-  // Pull changes from above tests.
-  await client.db.saveLastModified(4000);
-  await client.db.saveMetadata({ signature: { x5u, signature: "aa" } });
   // Create an extra record. It will have a valid signature locally
   // thanks to the verifier mock.
-  await client.db.create({
-    id: "extraId",
-    last_modified: 42,
-  });
+  await client.db.importChanges(
+    {
+      signature: { x5u, signature: "aa" },
+    },
+    4000,
+    [
+      {
+        id: "extraId",
+        last_modified: 42,
+      },
+    ]
+  );
+
   equal((await client.get()).length, 1);

   // Now sync, but importing changes will have failing signature,

@@ -54,8 +54,7 @@ add_task(async _ => {
     });
   }
   let db = await RemoteSettings(COLLECTION_NAME).db;
-  await db.create(records[0]);
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42, [records[0]]);
   await emitSync();

   await uds.ensureUpdated();

@@ -191,7 +191,7 @@ const NormandyTestUtils = {

     // last modified needs to be some positive integer
     let lastModified = await db.getLastModified();
-    await db.saveLastModified(lastModified + 1);
+    await db.importChanges({}, lastModified + 1);

     const collectionHelper = {
       async addRecipes(newRecipes) {
@@ -212,7 +212,7 @@ const NormandyTestUtils = {
         });
       }
       lastModified = (await db.getLastModified()) || 0;
-      await db.saveLastModified(lastModified + 1);
+      await db.importChanges({}, lastModified + 1);
     },
   };

|
@ -222,7 +222,7 @@ const NormandyTestUtils = {
|
|||
db = await RecipeRunner._remoteSettingsClientForTesting.db;
|
||||
await db.clear();
|
||||
lastModified = await db.getLastModified();
|
||||
await db.saveLastModified(lastModified + 1);
|
||||
await db.importChanges({}, lastModified + 1);
|
||||
}
|
||||
};
|
||||
};
|
||||
|
|
|
@@ -290,20 +290,20 @@ decorate_task(
     };

     const db = await RecipeRunner._remoteSettingsClientForTesting.db;
-    await db.clear();
     const fakeSig = { signature: "abc" };
-    await db.create({ id: "match", recipe: matchRecipe, signature: fakeSig });
-    await db.create({
-      id: "noMatch",
-      recipe: noMatchRecipe,
-      signature: fakeSig,
-    });
-    await db.create({
-      id: "missing",
-      recipe: missingRecipe,
-      signature: fakeSig,
-    });
-    await db.saveLastModified(42);
+    await db.importChanges({}, 42, [
+      { id: "match", recipe: matchRecipe, signature: fakeSig },
+      {
+        id: "noMatch",
+        recipe: noMatchRecipe,
+        signature: fakeSig,
+      },
+      {
+        id: "missing",
+        recipe: missingRecipe,
+        signature: fakeSig,
+      },
+    ]);

     let recipesFromRS = (
       await RecipeRunner._remoteSettingsClientForTesting.get()
@@ -366,19 +366,26 @@ decorate_task(
     };

     const db = await RecipeRunner._remoteSettingsClientForTesting.db;
-    await db.clear();
     const fakeSig = { signature: "abc" };
-    await db.create({
-      id: "match",
-      recipe: compatibleRecipe,
-      signature: fakeSig,
-    });
-    await db.create({
-      id: "noMatch",
-      recipe: incompatibleRecipe,
-      signature: fakeSig,
-    });
-    await db.saveLastModified(42);
+    await db.importChanges(
+      {},
+      42,
+      [
+        {
+          id: "match",
+          recipe: compatibleRecipe,
+          signature: fakeSig,
+        },
+        {
+          id: "noMatch",
+          recipe: incompatibleRecipe,
+          signature: fakeSig,
+        },
+      ],
+      {
+        clear: true,
+      }
+    );

     await RecipeRunner.run();

@@ -201,13 +201,19 @@ add_task(async function test_selector_db_modification() {
   const engineSelector = new SearchEngineSelector();
   // Fill the database with some values that we can use to test that it is cleared.
   const db = await RemoteSettings(SearchUtils.SETTINGS_KEY).db;
-  await db.clear();
-  await db.create({
-    default: "yes",
-    engineName: "askjeeves",
-    appliesTo: [{ included: { everywhere: true } }],
-  });
-  await db.saveLastModified(42);
+  await db.importChanges(
+    {},
+    42,
+    [
+      {
+        id: "85e1f268-9ca5-4b52-a4ac-922df5c07264",
+        default: "yes",
+        engineName: "askjeeves",
+        appliesTo: [{ included: { everywhere: true } }],
+      },
+    ],
+    { clear: true }
+  );

   // Stub the get() so that the first call simulates a signature error, and
   // the second simulates success reading from the dump.
@@ -242,13 +248,21 @@ add_task(async function test_selector_db_modification_never_succeeds() {
   const engineSelector = new SearchEngineSelector();
   // Fill the database with some values that we can use to test that it is cleared.
   const db = RemoteSettings(SearchUtils.SETTINGS_KEY).db;
-  await db.clear();
-  await db.create({
-    default: "yes",
-    engineName: "askjeeves",
-    appliesTo: [{ included: { everywhere: true } }],
-  });
-  await db.saveLastModified(42);
+  await db.importChanges(
+    {},
+    42,
+    [
+      {
+        id: "b70edfdd-1c3f-4b7b-ab55-38cb048636c0",
+        default: "yes",
+        engineName: "askjeeves",
+        appliesTo: [{ included: { everywhere: true } }],
+      },
+    ],
+    {
+      clear: true,
+    }
+  );

   // Now simulate the condition where for some reason we never get a
   // valid result.
@@ -277,13 +291,21 @@ add_task(async function test_empty_results() {
   const engineSelector = new SearchEngineSelector();
   // Fill the database with some values that we can use to test that it is cleared.
   const db = await RemoteSettings(SearchUtils.SETTINGS_KEY).db;
-  await db.clear();
-  await db.create({
-    default: "yes",
-    engineName: "askjeeves",
-    appliesTo: [{ included: { everywhere: true } }],
-  });
-  await db.saveLastModified(42);
+  await db.importChanges(
+    {},
+    42,
+    [
+      {
+        id: "df5655ca-e045-4f8c-a7ee-047eeb654722",
+        default: "yes",
+        engineName: "askjeeves",
+        appliesTo: [{ included: { everywhere: true } }],
+      },
+    ],
+    {
+      clear: true,
+    }
+  );

   // Stub the get() so that the first call simulates an empty database, and
   // the second simulates success reading from the dump.

@@ -14,6 +14,7 @@ skip-if = os == "win" # Windows doesn't have the same sort of permissions manage
 [test_engine_selector_application.js]
 [test_engine_selector_order.js]
 [test_engine_selector_remote_settings.js]
+tags = remote-settings
 [test_engine_selector.js]
 [test_engine_set_alias.js]
 [test_identifiers.js]

@@ -58,9 +58,7 @@ add_task(async function test_list_changes() {

   // Add some initial data.
   let db = await RemoteSettings(COLLECTION_NAME).db;
-  await db.create(records[0]);
-  await db.saveLastModified(42);
-
+  await db.importChanges({}, 42, records);
   let promise = waitForEvent(updateEvent, "update");

   skipListService.registerAndRunSkipListObserver(
@@ -185,10 +183,7 @@ add_task(async function test_list_init_data() {

   // Add some initial data.
   let db = await RemoteSettings(COLLECTION_NAME).db;
-  for (const record of records) {
-    await db.create(record);
-  }
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42, records);

   // The first registered feature make SkipListService get the initial data
   // from remote setting.

@@ -161,14 +161,7 @@ add_task(async function test_ignoreList_updates() {
 add_task(async function test_ignoreList_db_modification() {
   // Fill the database with some values that we can use to test that it is cleared.
   const db = await RemoteSettings(IGNORELIST_KEY).db;
-  await db.clear();
-  for (const data of IGNORELIST_TEST_DATA) {
-    await db.create({
-      id: data.id,
-      matches: data.matches,
-    });
-  }
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42, IGNORELIST_TEST_DATA, { clear: true });

   // Stub the get() so that the first call simulates a signature error, and
   // the second simulates success reading from the dump.
@@ -198,14 +191,7 @@ add_task(async function test_ignoreList_db_modification() {
 add_task(async function test_ignoreList_db_modification_never_succeeds() {
   // Fill the database with some values that we can use to test that it is cleared.
   const db = await RemoteSettings(IGNORELIST_KEY).db;
-  await db.clear();
-  for (const data of IGNORELIST_TEST_DATA) {
-    await db.create({
-      id: data.id,
-      matches: data.matches,
-    });
-  }
-  await db.saveLastModified(42);
+  await db.importChanges({}, 42, IGNORELIST_TEST_DATA, { clear: true });

   // Now simulate the condition where for some reason we never get a
   // valid result.

@@ -892,6 +892,21 @@ var AddonTestUtils = {
       plugins: bsPass.PluginBlocklistRS,
     };

+    // Since we load the specified test data, we shouldn't let the
+    // packaged JSON dumps to interfere.
+    const pref = "services.settings.load_dump";
+    const backup = Services.prefs.getBoolPref(pref, null);
+    Services.prefs.setBoolPref(pref, false);
+    if (this.testScope) {
+      this.testScope.registerCleanupFunction(() => {
+        if (backup === null) {
+          Services.prefs.clearUserPref(pref);
+        } else {
+          Services.prefs.setBoolPref(pref, backup);
+        }
+      });
+    }
+
     for (const [dataProp, blocklistObj] of Object.entries(blocklistMapping)) {
       let newData = data[dataProp];
       if (!newData) {
@@ -914,12 +929,12 @@ var AddonTestUtils = {
       }
       blocklistObj.ensureInitialized();
       let db = await blocklistObj._client.db;
-      await db.clear();
       const collectionTimestamp = Math.max(
         ...newData.map(r => r.last_modified)
       );
-      await db.saveLastModified(collectionTimestamp);
-      await db.importBulk(newData);
+      await db.importChanges({}, collectionTimestamp, newData, {
+        clear: true,
+      });
       // We manually call _onUpdate... which is evil, but at the moment kinto doesn't have
       // a better abstraction unless you want to mock your own http server to do the update.
       await blocklistObj._onUpdate();

@@ -1196,7 +1196,6 @@ async function mockGfxBlocklistItems(items) {
     Services.prefs.getCharPref("services.blocklist.gfx.collection"),
     { bucketNamePref: "services.blocklist.bucket" }
   );
-  await client.db.clear();
   const records = items.map(item => {
     if (item.id && item.last_modified) {
       return item;
@@ -1210,8 +1209,9 @@ async function mockGfxBlocklistItems(items) {
     };
   });
   const collectionTimestamp = Math.max(...records.map(r => r.last_modified));
-  await client.db.importBulk(records);
-  await client.db.saveLastModified(collectionTimestamp);
+  await client.db.importChanges({}, collectionTimestamp, records, {
+    clear: true,
+  });
   let rv = await bsPass.GfxBlocklistRS.checkForEntries();
   return rv;
 }

@@ -209,7 +209,7 @@ add_task(
   for (const record of records) {
     await client.db.create(record);
   }
-  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
+  await client.db.importChanges({}, 42); // Prevent from loading JSON dump.
   const list = await client.get({ syncIfEmpty: false });
   equal(list.length, 4);
   ok(list.every(e => e.willMatch));

|
|||
}
|
||||
|
||||
async function createRecords(records) {
|
||||
for (const record of records) {
|
||||
await client.db.create(record);
|
||||
}
|
||||
client.db.saveLastModified(42); // Simulate sync (and prevent load dump).
|
||||
const withId = records.map((record, i) => ({
|
||||
id: `record-${i}`,
|
||||
...record,
|
||||
}));
|
||||
return client.db.importChanges({}, 42, withId);
|
||||
}
|
||||
|
||||
function run_test() {
|
||||
|
|
|
@ -75,7 +75,7 @@ add_task(async function test_blocklist_lastModified_rs_scalars() {
|
|||
}
|
||||
|
||||
async function fakeRemoteSettingsSync(rsClient, lastModified) {
|
||||
await rsClient.db.saveLastModified(lastModified);
|
||||
await rsClient.db.importChanges({}, lastModified);
|
||||
await rsClient.emit("sync");
|
||||
}
|
||||
|
||||
|
|