Mirror of https://github.com/mozilla/gecko-dev.git
Backed out changeset a0af6d53c082 (bug 1639284) for multiple failures e.g. browser_asrouter_whatsnewpanel.js CLOSED TREE
This commit is contained in:
Parent: 7036817d6e
Commit: aec20399f5
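For orientation: the change being backed out (bug 1639284) had merged the settings database's separate importBulk()/deleteBulk()/saveLastModified()/saveMetadata() operations into a single importChanges() entry point, and the hunks below consistently swap one pattern for the other. A minimal sketch of the two styles, using the same illustrative collection name, record, and timestamp that appear in the test hunks:

    const client = RemoteSettings("cfr");
    // Style removed by this backout: one call writes records, timestamp
    // and metadata, optionally clearing the collection first.
    await client.db.importChanges({}, 42, [testMessage], { clear: true });
    // Style restored by this backout: explicit, separate steps.
    await client.db.clear();
    await client.db.create(testMessage);
    await client.db.saveLastModified(42); // Prevent from loading JSON dump.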
@@ -88,7 +88,7 @@ add_task(async function setup_head() {
      useRecordId: true,
    });
  }
  await db.importChanges({}, 42);
  await db.saveLastModified(42);
  if (EXPECTED_BREACH) {
    await RemoteSettings(LoginBreaches.REMOTE_SETTINGS_COLLECTION).emit(
      "sync",
@@ -282,7 +282,8 @@ add_task(async function test_setBreachesFromRemoteSettingsSync() {
  );
  gBrowserGlue.observe(null, "browser-glue-test", "add-breaches-sync-handler");
  const db = await RemoteSettings(LoginBreaches.REMOTE_SETTINGS_COLLECTION).db;
  await db.importChanges({}, 42, [nowExampleIsInBreachedRecords[0]]);
  await db.create(nowExampleIsInBreachedRecords[0]);
  await db.saveLastModified(42);
  await emitSync();

  const breachesByLoginGUID = await LoginBreaches.getPotentialBreachesByLoginGUID(
@@ -66,7 +66,7 @@ add_task(async function test_main_flow() {
      1}-${AddedDate.getDate()}`,
    PwnCount: 1000000,
  });
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);

  // Trigger a sync.
  await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -103,7 +103,7 @@ add_task(async function test_main_flow() {

  // Reset state.
  await db.clear();
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);
  await clearWarnedHosts();
  await SpecialPowers.pushPrefEnv({
    clear: [["extensions.fxmonitor.firstAlertShown"]],
@@ -133,7 +133,7 @@ add_task(async function test_main_flow() {

  // Reset state (but not firstAlertShown).
  await db.clear();
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);
  await clearWarnedHosts();

  info(
@@ -151,7 +151,7 @@ add_task(async function test_main_flow() {
      1}-${AddedDate.getDate()}`,
    PwnCount: 1000000,
  });
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);

  // Trigger a sync.
  await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -169,7 +169,7 @@ add_task(async function test_main_flow() {
  // Reset state (but not firstAlertShown).
  AddedDate.setMonth(AddedDate.getMonth() + 3);
  await db.clear();
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);
  await clearWarnedHosts();

  info("Test that we do show the second alert for a recent breach.");
@@ -184,7 +184,7 @@ add_task(async function test_main_flow() {
      1}-${AddedDate.getDate()}`,
    PwnCount: 1000000,
  });
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);

  // Trigger a sync.
  await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -201,7 +201,7 @@ add_task(async function test_main_flow() {

  // Reset state (including firstAlertShown)
  await db.clear();
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);
  await clearWarnedHosts();
  await SpecialPowers.pushPrefEnv({
    clear: [["extensions.fxmonitor.firstAlertShown"]],
@@ -222,7 +222,7 @@ add_task(async function test_main_flow() {
      1}-${AddedDate.getDate()}`,
    PwnCount: 1000000,
  });
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);

  // Trigger a sync.
  await RemoteSettings(kRemoteSettingsKey).emit("sync", {
@@ -240,7 +240,7 @@ add_task(async function test_main_flow() {
  // Clean up.
  BrowserTestUtils.removeTab(tab);
  await db.clear();
  await db.importChanges({}, 1234567);
  await db.saveLastModified(1234567);
  // Trigger a sync to clear.
  await RemoteSettings(kRemoteSettingsKey).emit("sync", {
    data: {
@@ -135,15 +135,13 @@ add_task(async function test_loading_experimentsAPI() {
    ],
  });
  const client = RemoteSettings("messaging-experiments");
  await client.db.importChanges(
    {},
    42,
    [
      // Modify targeting to ensure the messages always show up
      { ...EXPERIMENT_PAYLOAD },
    ],
    { clear: true }
  await client.db.clear();
  await client.db.create(
    // Modify targeting to ensure the messages always show up
    { ...EXPERIMENT_PAYLOAD }
  );
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Fetch the new recipe from RS
  await RemoteSettingsExperimentLoader.updateRecipes();
  await BrowserTestUtils.waitForCondition(
@@ -35,9 +35,9 @@ add_task(async function setup() {
    id: `HEARTBEAT_MESSAGE_${Date.now()}`,
  };
  const client = RemoteSettings("cfr");
  await client.db.importChanges({}, 42, [testMessage], {
    clear: true,
  });
  await client.db.clear();
  await client.db.create(testMessage);
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Reload the providers
  await BrowserTestUtils.waitForCondition(async () => {
@@ -93,9 +93,9 @@ add_task(async function test_heartbeat_tactic_2() {
    frequency: { lifetime: 2 },
  };
  const client = RemoteSettings("message-groups");
  await client.db.importChanges({}, 42, [groupConfiguration], {
    clear: true,
  });
  await client.db.clear();
  await client.db.create(groupConfiguration);
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Reload the providers
  await ASRouter._updateMessageProviders();
@@ -35,7 +35,9 @@ add_task(async function setup() {
    id: `HEARTBEAT_MESSAGE_${Date.now()}`,
  };
  const client = RemoteSettings("cfr");
  await client.db.importChanges({}, 42, [testMessage], { clear: true });
  await client.db.clear();
  await client.db.create(testMessage);
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Reload the providers
  await BrowserTestUtils.waitForCondition(async () => {
@@ -87,7 +89,9 @@ add_task(async function test_heartbeat_tactic_2() {
    userPreferences: ["browser.userPreference.messaging-experiments"],
  };
  const client = RemoteSettings("message-groups");
  await client.db.importChanges({}, 42, [groupConfiguration], { clear: true });
  await client.db.clear();
  await client.db.create(groupConfiguration);
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Reload the providers
  await ASRouter._updateMessageProviders();
@@ -28,20 +28,16 @@ add_task(async function test_with_rs_messages() {
  );
  const initialMessageCount = ASRouter.state.messages.length;
  const client = RemoteSettings("cfr");
  await client.db.importChanges(
    {},
    42,
    [
      {
        // Modify targeting and randomize message name to work around the message
        // getting blocked (for --verify)
        ...msg,
        id: `MOMENTS_MOCHITEST_${Date.now()}`,
        targeting: "true",
      },
    ],
    { clear: true }
  );
  await client.db.clear();
  await client.db.create({
    // Modify targeting and randomize message name to work around the message
    // getting blocked (for --verify)
    ...msg,
    id: `MOMENTS_MOCHITEST_${Date.now()}`,
    targeting: "true",
  });
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  // Reload the provider
  await ASRouter._updateMessageProviders();
  // Wait to load the WNPanel messages
@@ -71,7 +67,8 @@ add_task(async function test_with_rs_messages() {
      id: `MOMENTS_MOCHITEST_${Date.now()}`,
      priority: 2,
      targeting: "true",
    }
    },
    { useRecordId: true }
  );

  // Reset so we can `await` for the pref value to be set again
@@ -26,15 +26,14 @@ add_task(async function test_with_rs_messages() {
  );
  const initialMessageCount = ASRouter.state.messages.length;
  const client = RemoteSettings("whats-new-panel");
  await client.db.importChanges(
    {},
    42,
    [
  await client.db.clear();
  for (const record of msgs) {
    await client.db.create(
      // Modify targeting to ensure the messages always show up
      { ...record, targeting: "true" },
    ],
    { clear: true }
  );
      { ...record, targeting: "true" }
    );
  }
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.

  const whatsNewBtn = document.getElementById("appMenu-whatsnew-button");
  Assert.equal(whatsNewBtn.hidden, true, "What's New btn doesn't exist");
@@ -428,9 +428,7 @@ class IntermediatePreloads {
    }
    const toReset = current.filter(record => record.cert_import_complete);
    try {
      await this.client.db.importChanges(
        undefined, // do not touch metadata.
        undefined, // do not touch collection timestamp.
      await this.client.db.importBulk(
        toReset.map(r => ({ ...r, cert_import_complete: false }))
      );
    } catch (err) {
@@ -497,9 +495,7 @@ class IntermediatePreloads {
      return;
    }
    try {
      await this.client.db.importChanges(
        undefined, // do not touch metadata.
        undefined, // do not touch collection timestamp.
      await this.client.db.importBulk(
        recordsToUpdate.map(r => ({ ...r, cert_import_complete: true }))
      );
    } catch (err) {
@@ -525,7 +521,6 @@ class IntermediatePreloads {
    const finalWaiting = finalCurrent.filter(
      record => !record.cert_import_complete
    );

    const countPreloaded = finalCurrent.length - finalWaiting.length;

    TelemetryStopwatch.finish(INTERMEDIATES_UPDATE_MS_TELEMETRY);
@@ -435,7 +435,7 @@ And records can be created manually (as if they were synchronized from the server):
      domain: "website.com",
      usernameSelector: "#login-account",
      passwordSelector: "#pass-signin",
    });
    }, { synced: true });

If no timestamp is set, any call to ``.get()`` will trigger the load of the initial data (JSON dump) if any, or a synchronization. To avoid that, store a fake timestamp:
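The one-line example the docs give at this point was not captured by the mirror; a minimal sketch with the restored API, matching the saveLastModified() calls used throughout the tests in this commit (42 is an arbitrary placeholder timestamp):

    await client.db.saveLastModified(42); // Prevent from loading the JSON dump.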
@@ -10,11 +10,9 @@ const { Services } = ChromeUtils.import("resource://gre/modules/Services.jsm");
XPCOMUtils.defineLazyModuleGetters(this, {
  AsyncShutdown: "resource://gre/modules/AsyncShutdown.jsm",
  IDBHelpers: "resource://services-settings/IDBHelpers.jsm",
  Utils: "resource://services-settings/Utils.jsm",
  CommonUtils: "resource://services-common/utils.js",
  ObjectUtils: "resource://gre/modules/ObjectUtils.jsm",
});
XPCOMUtils.defineLazyGetter(this, "console", () => Utils.log);

var EXPORTED_SYMBOLS = ["Database"];
@@ -76,87 +74,51 @@ class Database {
    return sort ? sortObjects(sort, results) : results;
  }

  async importChanges(metadata, timestamp, records = [], options = {}) {
    const { clear = false } = options;
  async importBulk(toInsert) {
    const _cid = this.identifier;
    try {
      await executeIDB(
        ["collections", "timestamps", "records"],
        (stores, rejectTransaction) => {
          const [storeMetadata, storeTimestamps, storeRecords] = stores;

          if (clear) {
            // Our index is over the _cid and id fields. We want to remove
            // all of the items in the collection for which the object was
            // created, ie with _cid == this.identifier.
            // We would like to just tell IndexedDB:
            // store.index(IDBKeyRange.only(this.identifier)).delete();
            // to delete all records matching the first part of the 2-part key.
            // Unfortunately such an API does not exist.
            // While we could iterate over the index with a cursor, we'd do
            // a roundtrip to PBackground for each item. Once you have 1000
            // items, the result is very slow because of all the overhead of
            // jumping between threads and serializing/deserializing.
            // So instead, we tell the store to delete everything between
            // "our" _cid identifier, and what would be the next identifier
            // (via lexicographical sorting). Unfortunately there does not
            // seem to be a way to specify bounds for all items that share
            // the same first part of the key using just that first part, hence
            // the use of the hypothetical [] for the second part of the end of
            // the bounds.
            storeRecords.delete(
              IDBKeyRange.bound([_cid], [_cid, []], false, true)
            );
          }

          // Store or erase metadata.
          if (metadata === null) {
            storeMetadata.delete(_cid);
          } else if (metadata) {
            storeMetadata.put({ cid: _cid, metadata });
          }
          // Store or erase timestamp.
          if (timestamp === null) {
            storeTimestamps.delete(_cid);
          } else if (timestamp) {
            storeTimestamps.put({ cid: _cid, value: timestamp });
          }

          if (records.length == 0) {
            return;
          }

          // Separate tombstones from creations/updates.
          const toDelete = records.filter(r => r.deleted);
          const toInsert = records.filter(r => !r.deleted);
          console.debug(
            `${_cid} ${toDelete.length} to delete, ${toInsert.length} to insert`
          );
          // Delete local records for each tombstone.
        "records",
        (store, rejectTransaction) => {
          IDBHelpers.bulkOperationHelper(
            storeRecords,
            store,
            {
              reject: rejectTransaction,
              completion() {
                // Overwrite all other data.
                IDBHelpers.bulkOperationHelper(
                  storeRecords,
                  {
                    reject: rejectTransaction,
                  },
                  "put",
                  toInsert.map(item => ({ ...item, _cid }))
                );
              },
            },
            "delete",
            toDelete.map(item => [_cid, item.id])
            "put",
            toInsert.map(item => {
              return Object.assign({ _cid }, item);
            })
          );
        },
        { desc: "importChanges() in " + _cid }
        { desc: "importBulk() in " + this.identifier }
      );
    } catch (e) {
      throw new IDBHelpers.IndexedDBError(e, "importChanges()", _cid);
      throw new IDBHelpers.IndexedDBError(e, "importBulk()", this.identifier);
    }
  }

  async deleteBulk(toDelete) {
    const _cid = this.identifier;
    try {
      await executeIDB(
        "records",
        (store, rejectTransaction) => {
          IDBHelpers.bulkOperationHelper(
            store,
            {
              reject: rejectTransaction,
            },
            "delete",
            toDelete.map(item => {
              return [_cid, item.id];
            })
          );
        },
        { desc: "deleteBulk() in " + this.identifier }
      );
    } catch (e) {
      throw new IDBHelpers.IndexedDBError(e, "deleteBulk()", this.identifier);
    }
  }
@@ -180,6 +142,30 @@ class Database {
    return entry ? entry.value : null;
  }

  async saveLastModified(lastModified) {
    const value = parseInt(lastModified, 10) || null;
    try {
      await executeIDB(
        "timestamps",
        store => {
          if (value === null) {
            store.delete(this.identifier);
          } else {
            store.put({ cid: this.identifier, value });
          }
        },
        { desc: "saveLastModified() in " + this.identifier }
      );
    } catch (e) {
      throw new IDBHelpers.IndexedDBError(
        e,
        "saveLastModified()",
        this.identifier
      );
    }
    return value;
  }

  async getMetadata() {
    let entry = null;
    try {
@@ -196,6 +182,25 @@ class Database {
    return entry ? entry.metadata : null;
  }

  async saveMetadata(metadata) {
    try {
      await executeIDB(
        "collections",
        store => {
          if (metadata === null) {
            store.delete(this.identifier);
          } else {
            store.put({ cid: this.identifier, metadata });
          }
        },
        { desc: "saveMetadata() in " + this.identifier }
      );
      return metadata;
    } catch (e) {
      throw new IDBHelpers.IndexedDBError(e, "saveMetadata()", this.identifier);
    }
  }

  async getAttachment(attachmentId) {
    let entry = null;
    try {
@@ -242,7 +247,40 @@ class Database {

  async clear() {
    try {
      await this.importChanges(null, null, [], { clear: true });
      await this.saveLastModified(null);
      await this.saveMetadata(null);
      await executeIDB(
        "records",
        store => {
          // Our index is over the _cid and id fields. We want to remove
          // all of the items in the collection for which the object was
          // created, ie with _cid == this.identifier.
          // We would like to just tell IndexedDB:
          // store.index(IDBKeyRange.only(this.identifier)).delete();
          // to delete all records matching the first part of the 2-part key.
          // Unfortunately such an API does not exist.
          // While we could iterate over the index with a cursor, we'd do
          // a roundtrip to PBackground for each item. Once you have 1000
          // items, the result is very slow because of all the overhead of
          // jumping between threads and serializing/deserializing.
          // So instead, we tell the store to delete everything between
          // "our" _cid identifier, and what would be the next identifier
          // (via lexicographical sorting). Unfortunately there does not
          // seem to be a way to specify bounds for all items that share
          // the same first part of the key using just that first part, hence
          // the use of the hypothetical [] for the second part of the end of
          // the bounds.
          return store.delete(
            IDBKeyRange.bound(
              [this.identifier],
              [this.identifier, []],
              false,
              true
            )
          );
        },
        { desc: "clear() in " + this.identifier }
      );
    } catch (e) {
      throw new IDBHelpers.IndexedDBError(e, "clear()", this.identifier);
    }
@@ -328,13 +366,13 @@ const gPendingWriteOperations = new Set();
 * Helper to wrap some IDBObjectStore operations into a promise.
 *
 * @param {IDBDatabase} db
 * @param {String|String[]} storeNames - either a string or an array of strings.
 * @param {String} storeName
 * @param {function} callback
 * @param {Object} options
 * @param {String} options.mode
 * @param {String} options.desc for shutdown tracking.
 */
async function executeIDB(storeNames, callback, options = {}) {
async function executeIDB(storeName, callback, options = {}) {
  if (!gDB) {
    // Check if we're shutting down. Services.startup.shuttingDown will
    // be true sooner, but is never true in xpcshell tests, so we check
@@ -365,7 +403,7 @@ async function executeIDB(storeNames, callback, options = {}) {
  const { mode = "readwrite", desc = "" } = options;
  let { promise, transaction } = IDBHelpers.executeIDB(
    gDB,
    storeNames,
    storeName,
    mode,
    callback,
    desc
@@ -362,9 +362,9 @@ class RemoteSettingsClient extends EventEmitter {
      try {
        await this._importingPromise;
      } catch (e) {
        // Report error, but continue because there could have been data
        // loaded from a parallel call.
        // Report but return an empty list since there will be no data anyway.
        Cu.reportError(e);
        return [];
      } finally {
        // then delete this promise again, as now we should have local data:
        delete this._importingPromise;
@@ -520,7 +520,7 @@ class RemoteSettingsClient extends EventEmitter {
    const metadata = await this.httpClient().getData({
      query: { _expected: expectedTimestamp },
    });
    await this.db.importChanges(metadata);
    await this.db.saveMetadata(metadata);
    // We don't bother validating the signature if the dump was just loaded. We do
    // if the dump was loaded at some other point (eg. from .get()).
    if (this.verifySignature && importedFromDump.length == 0) {
@@ -749,11 +749,6 @@ class RemoteSettingsClient extends EventEmitter {
        "duration"
      );
    }
    if (result < 0) {
      console.debug(`${this.identifier} no dump available`);
    } else {
      console.info(`${this.identifier} imported ${result} records from dump`);
    }
    return result;
  }
@@ -861,10 +856,20 @@ class RemoteSettingsClient extends EventEmitter {
      return syncResult;
    }

    // Separate tombstones from creations/updates.
    const toDelete = remoteRecords.filter(r => r.deleted);
    const toInsert = remoteRecords.filter(r => !r.deleted);
    console.debug(
      `${this.identifier} ${toDelete.length} to delete, ${toInsert.length} to insert`
    );

    const start = Cu.now() * 1000;
    await this.db.importChanges(metadata, remoteTimestamp, remoteRecords, {
      clear: retry,
    });
    // Delete local records for each tombstone.
    await this.db.deleteBulk(toDelete);
    // Overwrite all other data.
    await this.db.importBulk(toInsert);
    await this.db.saveLastModified(remoteTimestamp);
    await this.db.saveMetadata(metadata);
    if (gTimingEnabled) {
      const end = Cu.now() * 1000;
      PerformanceCounters.storeExecutionTime(
@@ -916,11 +921,12 @@ class RemoteSettingsClient extends EventEmitter {
        console.debug(`${this.identifier} previous data was invalid`);
      }

      // Signature failed, clear local DB because it contains
      // bad data (local + remote changes).
      console.debug(`${this.identifier} clear local data`);
      await this.db.clear();

      if (!localTrustworthy && !retry) {
        // Signature failed, clear local DB because it contains
        // bad data (local + remote changes).
        console.debug(`${this.identifier} clear local data`);
        await this.db.clear();
        // Local data was tampered with; throw and it will retry from an empty DB.
        console.error(`${this.identifier} local data was corrupted`);
        throw new CorruptedDataError(this.identifier);
@@ -928,22 +934,16 @@ class RemoteSettingsClient extends EventEmitter {
        // We retried already, we will restore the previous local data
        // before throwing eventually.
        if (localTrustworthy) {
          await this.db.importChanges(
            localMetadata,
            localTimestamp,
            localRecords,
            {
              clear: true, // clear before importing.
            }
          // Signature of data before importing changes was good.
          console.debug(
            `${this.identifier} Restore previous data (timestamp=${localTimestamp})`
          );
          await this.db.importBulk(localRecords);
          await this.db.saveLastModified(localTimestamp);
          await this.db.saveMetadata(localMetadata);
        } else {
          // Restore the dump if available (no-op if no dump)
          const imported = await this._importJSONDump();
          // _importJSONDump() only clears DB if dump is available,
          // therefore do it here!
          if (imported < 0) {
            await this.db.clear();
          }
          await this._importJSONDump();
        }
      }
      throw e;
@@ -199,8 +199,6 @@ async function importDumpIDB(bucket, collection, records) {
    [IDB_RECORDS_STORE, IDB_TIMESTAMPS_STORE],
    "readwrite",
    ([recordsStore, timestampStore], rejectTransaction) => {
      // Wipe before loading
      recordsStore.delete(IDBKeyRange.bound([cid], [cid, []], false, true));
      IDBHelpers.bulkOperationHelper(
        recordsStore,
        {
@@ -102,12 +102,6 @@ add_task(async function test_records_obtained_from_server_are_stored_in_db() {
  // Our test data has a single record; it should be in the local collection
  const list = await client.get();
  equal(list.length, 1);

  const timestamp = await client.db.getLastModified();
  equal(timestamp, 3000, "timestamp was stored");

  const { signature } = await client.db.getMetadata();
  equal(signature.signature, "abcdef", "metadata was stored");
});
add_task(clear_state);
@@ -223,10 +217,13 @@ add_task(clear_state);
add_task(
  async function test_records_changes_are_overwritten_by_server_changes() {
    // Create some local conflicting data, and make sure it syncs without error.
    await client.db.create({
      website: "",
      id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
    });
    await client.db.create(
      {
        website: "",
        id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
      },
      { useRecordId: true }
    );

    await client.maybeSync(2000);
@@ -717,7 +714,7 @@ add_task(async function test_telemetry_reports_if_application_fails() {
add_task(clear_state);

add_task(async function test_telemetry_reports_if_sync_fails() {
  await client.db.importChanges({}, 9999);
  await client.db.saveLastModified(9999);

  const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
@@ -732,7 +729,7 @@ add_task(async function test_telemetry_reports_if_sync_fails() {
add_task(clear_state);

add_task(async function test_telemetry_reports_if_parsing_fails() {
  await client.db.importChanges({}, 10000);
  await client.db.saveLastModified(10000);

  const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
@@ -747,7 +744,7 @@ add_task(async function test_telemetry_reports_if_parsing_fails() {
add_task(clear_state);

add_task(async function test_telemetry_reports_if_fetching_signature_fails() {
  await client.db.importChanges({}, 11000);
  await client.db.saveLastModified(11000);

  const startHistogram = getUptakeTelemetrySnapshot(client.identifier);
@@ -6,17 +6,11 @@ const { RemoteSettings } = ChromeUtils.import(
let client;

async function createRecords(records) {
  await client.db.importChanges(
    {},
    42,
    records.map((record, i) => ({
      id: `record-${i}`,
      ...record,
    })),
    {
      clear: true,
    }
  );
  await client.db.clear();
  for (const record of records) {
    await client.db.create(record);
  }
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
}

function run_test() {
@@ -44,12 +38,10 @@ add_task(async function test_returns_all_without_target() {

add_task(async function test_filters_can_be_disabled() {
  const c = RemoteSettings("no-jexl", { filterFunc: null });
  await c.db.importChanges({}, 42, [
    {
      id: "abc",
      filter_expression: "1 == 2",
    },
  ]);
  await c.db.create({
    filter_expression: "1 == 2",
  });
  await c.db.saveLastModified(42); // Prevent from loading JSON dump.

  const list = await c.get();
  equal(list.length, 1);
@@ -942,10 +942,10 @@ add_task(async function test_syncs_clients_with_local_database() {
  // since we want to test «unknown» clients that have a local database.
  new RemoteSettingsClient("addons", {
    bucketNamePref: "services.blocklist.bucket", // bucketName = "blocklists"
  }).db.importChanges({}, 42);
  }).db.saveLastModified(42);
  new RemoteSettingsClient("recipes", {
    bucketNamePref: "services.settings.default_bucket", // bucketName = "main"
  }).db.importChanges({}, 43);
  }).db.saveLastModified(43);

  let error;
  try {
@@ -545,7 +545,7 @@ add_task(async function test_check_synchronization_with_signatures() {
  };

  // ensure our collection hasn't been replaced with an older, empty one
  equal((await client.get()).length, 2, "collection was restored");
  equal((await client.get()).length, 2);

  registerHandlers(badSigGoodOldResponses);
@@ -558,7 +558,7 @@ add_task(async function test_check_synchronization_with_signatures() {

  // Local data was unchanged, since it was newer than the one returned by the server,
  // thus the sync event is not sent.
  equal(syncEventSent, false, "event was not sent");
  equal(syncEventSent, false);

  //
  // 7.
@@ -595,18 +595,14 @@ add_task(async function test_check_synchronization_with_signatures() {
  // properly contains created, updated, and deleted records.
  // the local DB contains same id as RECORD2 and a fake record.
  // the final server collection contains RECORD2 and RECORD3
  const localId = "0602b1b2-12ab-4d3a-b6fb-593244e7b035";
  await client.db.importChanges(
    { signature: { x5u, signature: "abc" } },
    null,
    [
      { ...RECORD2, last_modified: 1234567890, serialNumber: "abc" },
      { id: localId },
    ],
    {
      clear: true,
    }
  await client.db.clear();
  await client.db.saveMetadata({ signature: { x5u, signature: "abc" } });
  await client.db.create(
    { ...RECORD2, last_modified: 1234567890, serialNumber: "abc" },
    { synced: true, useRecordId: true }
  );
  const localId = "0602b1b2-12ab-4d3a-b6fb-593244e7b035";
  await client.db.create({ id: localId }, { synced: true, useRecordId: true });

  let syncData = null;
  client.on("sync", ({ data }) => {
@@ -791,21 +787,15 @@ add_task(async function test_check_synchronization_with_signatures() {
      ][i++];
    },
  };
  // Pull changes from above tests.
  await client.db.saveLastModified(4000);
  await client.db.saveMetadata({ signature: { x5u, signature: "aa" } });
  // Create an extra record. It will have a valid signature locally
  // thanks to the verifier mock.
  await client.db.importChanges(
    {
      signature: { x5u, signature: "aa" },
    },
    4000,
    [
      {
        id: "extraId",
        last_modified: 42,
      },
    ]
  );

  await client.db.create({
    id: "extraId",
    last_modified: 42,
  });
  equal((await client.get()).length, 1);

  // Now sync, but importing changes will have failing signature,
@@ -54,7 +54,8 @@ add_task(async _ => {
    });
  }
  let db = await RemoteSettings(COLLECTION_NAME).db;
  await db.importChanges({}, 42, [records[0]]);
  await db.create(records[0]);
  await db.saveLastModified(42);
  await emitSync();

  await uds.ensureUpdated();
@@ -191,7 +191,7 @@ const NormandyTestUtils = {

    // last modified needs to be some positive integer
    let lastModified = await db.getLastModified();
    await db.importChanges({}, lastModified + 1);
    await db.saveLastModified(lastModified + 1);

    const collectionHelper = {
      async addRecipes(newRecipes) {
@@ -212,7 +212,7 @@ const NormandyTestUtils = {
        });
      }
      lastModified = (await db.getLastModified()) || 0;
      await db.importChanges({}, lastModified + 1);
      await db.saveLastModified(lastModified + 1);
    },
  };
@@ -222,7 +222,7 @@ const NormandyTestUtils = {
      db = await RecipeRunner._remoteSettingsClientForTesting.db;
      await db.clear();
      lastModified = await db.getLastModified();
      await db.importChanges({}, lastModified + 1);
      await db.saveLastModified(lastModified + 1);
    }
  };
};
@@ -290,20 +290,20 @@ decorate_task(
  };

  const db = await RecipeRunner._remoteSettingsClientForTesting.db;
  await db.clear();
  const fakeSig = { signature: "abc" };
  await db.importChanges({}, 42, [
    { id: "match", recipe: matchRecipe, signature: fakeSig },
    {
      id: "noMatch",
      recipe: noMatchRecipe,
      signature: fakeSig,
    },
    {
      id: "missing",
      recipe: missingRecipe,
      signature: fakeSig,
    },
  ]);
  await db.create({ id: "match", recipe: matchRecipe, signature: fakeSig });
  await db.create({
    id: "noMatch",
    recipe: noMatchRecipe,
    signature: fakeSig,
  });
  await db.create({
    id: "missing",
    recipe: missingRecipe,
    signature: fakeSig,
  });
  await db.saveLastModified(42);

  let recipesFromRS = (
    await RecipeRunner._remoteSettingsClientForTesting.get()
@@ -366,26 +366,19 @@ decorate_task(
  };

  const db = await RecipeRunner._remoteSettingsClientForTesting.db;
  await db.clear();
  const fakeSig = { signature: "abc" };
  await db.importChanges(
    {},
    42,
    [
      {
        id: "match",
        recipe: compatibleRecipe,
        signature: fakeSig,
      },
      {
        id: "noMatch",
        recipe: incompatibleRecipe,
        signature: fakeSig,
      },
    ],
    {
      clear: true,
    }
  );
  await db.create({
    id: "match",
    recipe: compatibleRecipe,
    signature: fakeSig,
  });
  await db.create({
    id: "noMatch",
    recipe: incompatibleRecipe,
    signature: fakeSig,
  });
  await db.saveLastModified(42);

  await RecipeRunner.run();
@@ -201,19 +201,13 @@ add_task(async function test_selector_db_modification() {
  const engineSelector = new SearchEngineSelector();
  // Fill the database with some values that we can use to test that it is cleared.
  const db = await RemoteSettings(SearchUtils.SETTINGS_KEY).db;
  await db.importChanges(
    {},
    42,
    [
      {
        id: "85e1f268-9ca5-4b52-a4ac-922df5c07264",
        default: "yes",
        engineName: "askjeeves",
        appliesTo: [{ included: { everywhere: true } }],
      },
    ],
    { clear: true }
  );
  await db.clear();
  await db.create({
    default: "yes",
    engineName: "askjeeves",
    appliesTo: [{ included: { everywhere: true } }],
  });
  await db.saveLastModified(42);

  // Stub the get() so that the first call simulates a signature error, and
  // the second simulates success reading from the dump.
@@ -248,21 +242,13 @@ add_task(async function test_selector_db_modification_never_succeeds() {
  const engineSelector = new SearchEngineSelector();
  // Fill the database with some values that we can use to test that it is cleared.
  const db = RemoteSettings(SearchUtils.SETTINGS_KEY).db;
  await db.importChanges(
    {},
    42,
    [
      {
        id: "b70edfdd-1c3f-4b7b-ab55-38cb048636c0",
        default: "yes",
        engineName: "askjeeves",
        appliesTo: [{ included: { everywhere: true } }],
      },
    ],
    {
      clear: true,
    }
  );
  await db.clear();
  await db.create({
    default: "yes",
    engineName: "askjeeves",
    appliesTo: [{ included: { everywhere: true } }],
  });
  await db.saveLastModified(42);

  // Now simulate the condition where for some reason we never get a
  // valid result.
@@ -291,21 +277,13 @@ add_task(async function test_empty_results() {
  const engineSelector = new SearchEngineSelector();
  // Fill the database with some values that we can use to test that it is cleared.
  const db = await RemoteSettings(SearchUtils.SETTINGS_KEY).db;
  await db.importChanges(
    {},
    42,
    [
      {
        id: "df5655ca-e045-4f8c-a7ee-047eeb654722",
        default: "yes",
        engineName: "askjeeves",
        appliesTo: [{ included: { everywhere: true } }],
      },
    ],
    {
      clear: true,
    }
  );
  await db.clear();
  await db.create({
    default: "yes",
    engineName: "askjeeves",
    appliesTo: [{ included: { everywhere: true } }],
  });
  await db.saveLastModified(42);

  // Stub the get() so that the first call simulates an empty database, and
  // the second simulates success reading from the dump.
@@ -14,7 +14,6 @@ skip-if = os == "win" # Windows doesn't have the same sort of permissions manage
[test_engine_selector_application.js]
[test_engine_selector_order.js]
[test_engine_selector_remote_settings.js]
tags = remote-settings
[test_engine_selector.js]
[test_engine_set_alias.js]
[test_identifiers.js]
@@ -58,7 +58,9 @@ add_task(async function test_list_changes() {

  // Add some initial data.
  let db = await RemoteSettings(COLLECTION_NAME).db;
  await db.importChanges({}, 42, records);
  await db.create(records[0]);
  await db.saveLastModified(42);

  let promise = waitForEvent(updateEvent, "update");

  skipListService.registerAndRunSkipListObserver(
@@ -183,7 +185,10 @@ add_task(async function test_list_init_data() {

  // Add some initial data.
  let db = await RemoteSettings(COLLECTION_NAME).db;
  await db.importChanges({}, 42, records);
  for (const record of records) {
    await db.create(record);
  }
  await db.saveLastModified(42);

  // The first registered feature makes SkipListService get the initial data
  // from remote settings.
@@ -161,7 +161,14 @@ add_task(async function test_ignoreList_updates() {
add_task(async function test_ignoreList_db_modification() {
  // Fill the database with some values that we can use to test that it is cleared.
  const db = await RemoteSettings(IGNORELIST_KEY).db;
  await db.importChanges({}, 42, IGNORELIST_TEST_DATA, { clear: true });
  await db.clear();
  for (const data of IGNORELIST_TEST_DATA) {
    await db.create({
      id: data.id,
      matches: data.matches,
    });
  }
  await db.saveLastModified(42);

  // Stub the get() so that the first call simulates a signature error, and
  // the second simulates success reading from the dump.
@@ -191,7 +198,14 @@ add_task(async function test_ignoreList_db_modification() {
add_task(async function test_ignoreList_db_modification_never_succeeds() {
  // Fill the database with some values that we can use to test that it is cleared.
  const db = await RemoteSettings(IGNORELIST_KEY).db;
  await db.importChanges({}, 42, IGNORELIST_TEST_DATA, { clear: true });
  await db.clear();
  for (const data of IGNORELIST_TEST_DATA) {
    await db.create({
      id: data.id,
      matches: data.matches,
    });
  }
  await db.saveLastModified(42);

  // Now simulate the condition where for some reason we never get a
  // valid result.
@@ -892,21 +892,6 @@ var AddonTestUtils = {
      plugins: bsPass.PluginBlocklistRS,
    };

    // Since we load the specified test data, we shouldn't let the
    // packaged JSON dumps interfere.
    const pref = "services.settings.load_dump";
    const backup = Services.prefs.getBoolPref(pref, null);
    Services.prefs.setBoolPref(pref, false);
    if (this.testScope) {
      this.testScope.registerCleanupFunction(() => {
        if (backup === null) {
          Services.prefs.clearUserPref(pref);
        } else {
          Services.prefs.setBoolPref(pref, backup);
        }
      });
    }

    for (const [dataProp, blocklistObj] of Object.entries(blocklistMapping)) {
      let newData = data[dataProp];
      if (!newData) {
@@ -929,12 +914,12 @@ var AddonTestUtils = {
      }
      blocklistObj.ensureInitialized();
      let db = await blocklistObj._client.db;
      await db.clear();
      const collectionTimestamp = Math.max(
        ...newData.map(r => r.last_modified)
      );
      await db.importChanges({}, collectionTimestamp, newData, {
        clear: true,
      });
      await db.saveLastModified(collectionTimestamp);
      await db.importBulk(newData);
      // We manually call _onUpdate... which is evil, but at the moment kinto doesn't have
      // a better abstraction unless you want to mock your own http server to do the update.
      await blocklistObj._onUpdate();
@@ -1196,6 +1196,7 @@ async function mockGfxBlocklistItems(items) {
    Services.prefs.getCharPref("services.blocklist.gfx.collection"),
    { bucketNamePref: "services.blocklist.bucket" }
  );
  await client.db.clear();
  const records = items.map(item => {
    if (item.id && item.last_modified) {
      return item;
@@ -1209,9 +1210,8 @@ async function mockGfxBlocklistItems(items) {
    };
  });
  const collectionTimestamp = Math.max(...records.map(r => r.last_modified));
  await client.db.importChanges({}, collectionTimestamp, records, {
    clear: true,
  });
  await client.db.importBulk(records);
  await client.db.saveLastModified(collectionTimestamp);
  let rv = await bsPass.GfxBlocklistRS.checkForEntries();
  return rv;
}
@@ -209,7 +209,7 @@ add_task(
  for (const record of records) {
    await client.db.create(record);
  }
  await client.db.importChanges({}, 42); // Prevent from loading JSON dump.
  await client.db.saveLastModified(42); // Prevent from loading JSON dump.
  const list = await client.get({ syncIfEmpty: false });
  equal(list.length, 4);
  ok(list.every(e => e.willMatch));
@@ -17,11 +17,10 @@ async function clear_state() {
}

async function createRecords(records) {
  const withId = records.map((record, i) => ({
    id: `record-${i}`,
    ...record,
  }));
  return client.db.importChanges({}, 42, withId);
  for (const record of records) {
    await client.db.create(record);
  }
  client.db.saveLastModified(42); // Simulate sync (and prevent load dump).
}

function run_test() {
@@ -75,7 +75,7 @@ add_task(async function test_blocklist_lastModified_rs_scalars() {
}

async function fakeRemoteSettingsSync(rsClient, lastModified) {
  await rsClient.db.importChanges({}, lastModified);
  await rsClient.db.saveLastModified(lastModified);
  await rsClient.emit("sync");
}