Bug 1426530 - Make the normandy test suite pass with --verify r=Gijs

MozReview-Commit-ID: Awnxu9IAhDQ

--HG--
extra : rebase_source : 8ec5f4e28c30264de402381d9775fc9abc99a05e
Mike Cooper 2017-12-20 15:34:39 -08:00
Parent 93b3403772
Commit d793eaa65d
8 changed files with 744 additions and 615 deletions
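The recurring change in this patch converts nested test wrappers into a flat decorate_task call, with the withStub/withSpy decorators taking over the hand-rolled sinon stubs and their restore() calls. A minimal sketch of the shape of the change, using wrapper names that appear in the hunks below (not an excerpt from any single hunk):

add_task(withMockExperiments(withMockPreferences(async function(experiments, mockPreferences) {
  const startObserver = sinon.stub(PreferenceExperiments, "startObserver");
  // test body
  startObserver.restore();
})));

decorate_task(
  withMockExperiments,
  withMockPreferences,
  withStub(PreferenceExperiments, "startObserver"),
  async function(experiments, mockPreferences, startObserver) {
    // test body; the stub arrives as an extra argument and the manual restore() call goes away
  }
);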

View file

@@ -120,11 +120,12 @@ this.AddonStudies = {
for (const study of studies) {
await getStore(db).add(study);
}
await AddonStudies.close();
try {
await testFunction(...args, studies);
} finally {
db = await getDatabase(); // Re-acquire in case the test closed the connection.
db = await getDatabase();
await AddonStudies.clear();
for (const study of oldStudies) {
await getStore(db).add(study);

View file

@@ -9,6 +9,16 @@ Cu.import("resource://shield-recipe-client/lib/AddonStudies.jsm", this);
// Initialize test utils
AddonTestUtils.initMochitest(this);
let _startArgsFactoryId = 1;
function startArgsFactory(args) {
return Object.assign({
recipeId: _startArgsFactoryId++,
name: "Test",
description: "Test",
addonUrl: "http://test/addon.xpi",
}, args);
}
decorate_task(
AddonStudies.withStudies(),
async function testGetMissing() {
@@ -103,16 +113,6 @@ decorate_task(
}
);
let _startArgsFactoryId = 0;
function startArgsFactory(args) {
return Object.assign({
recipeId: _startArgsFactoryId++,
name: "Test",
description: "Test",
addonUrl: "http://test/addon.xpi",
}, args);
}
add_task(async function testStartRequiredArguments() {
const requiredArguments = startArgsFactory();
for (const key in requiredArguments) {
@@ -175,6 +175,7 @@ decorate_task(
decorate_task(
withWebExtension({version: "2.0"}),
AddonStudies.withStudies(),
async function testStart([addonId, addonFile]) {
const startupPromise = AddonTestUtils.promiseWebExtensionStartup(addonId);
const addonUrl = Services.io.newFileURI(addonFile).spec;
@@ -210,7 +211,7 @@ decorate_task(
);
ok(study.studyStartDate, "start assigns a value to the study start date.");
await Addons.uninstall(addonId);
await AddonStudies.stop(args.recipeId);
}
);

View file

@@ -4,6 +4,7 @@ Cu.import("resource://gre/modules/AppConstants.jsm");
Cu.import("resource://testing-common/AddonTestUtils.jsm", this);
Cu.import("resource://shield-recipe-client/lib/AddonStudies.jsm", this);
Cu.import("resource://shield-recipe-client/lib/NormandyDriver.jsm", this);
Cu.import("resource://shield-recipe-client/lib/PreferenceExperiments.jsm", this);
add_task(withDriver(Assert, async function uuids(driver) {
// Test that it is a UUID
@@ -192,6 +193,7 @@ decorate_task(
decorate_task(
withSandboxManager(Assert),
withWebExtension({id: "driver-addon-studies@example.com"}),
AddonStudies.withStudies(),
async function testAddonStudies(sandboxManager, [addonId, addonFile]) {
const addonUrl = Services.io.newFileURI(addonFile).spec;
const driver = new NormandyDriver(sandboxManager);
@@ -319,7 +321,8 @@ decorate_task(
decorate_task(
withSandboxManager(Assert),
withMockPreferences,
async function testAddonStudies(sandboxManager) {
PreferenceExperiments.withMockExperiments,
async function testPreferenceStudies(sandboxManager) {
const driver = new NormandyDriver(sandboxManager);
sandboxManager.cloneIntoGlobal("driver", driver, {cloneFunctions: true});

View file

@@ -3,6 +3,7 @@
Cu.import("resource://gre/modules/Preferences.jsm", this);
Cu.import("resource://gre/modules/TelemetryEnvironment.jsm", this);
Cu.import("resource://shield-recipe-client/lib/PreferenceExperiments.jsm", this);
Cu.import("resource://shield-recipe-client/lib/CleanupManager.jsm", this);
// Save ourselves some typing
const {withMockExperiments} = PreferenceExperiments;
@@ -25,62 +26,74 @@ function experimentFactory(attrs) {
}
// clearAllExperimentStorage
add_task(withMockExperiments(async function(experiments) {
experiments.test = experimentFactory({name: "test"});
ok(await PreferenceExperiments.has("test"), "Mock experiment is detected.");
await PreferenceExperiments.clearAllExperimentStorage();
ok(
!(await PreferenceExperiments.has("test")),
"clearAllExperimentStorage removed all stored experiments",
);
}));
decorate_task(
withMockExperiments,
async function(experiments) {
experiments.test = experimentFactory({name: "test"});
ok(await PreferenceExperiments.has("test"), "Mock experiment is detected.");
await PreferenceExperiments.clearAllExperimentStorage();
ok(
!(await PreferenceExperiments.has("test")),
"clearAllExperimentStorage removed all stored experiments",
);
}
);
// start should throw if an experiment with the given name already exists
add_task(withMockExperiments(async function(experiments) {
experiments.test = experimentFactory({name: "test"});
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "default",
}),
"start threw an error due to a conflicting experiment name",
);
}));
decorate_task(
withMockExperiments,
async function(experiments) {
experiments.test = experimentFactory({name: "test"});
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "default",
}),
"start threw an error due to a conflicting experiment name",
);
}
);
// start should throw if an experiment for the given preference is active
add_task(withMockExperiments(async function(experiments) {
experiments.test = experimentFactory({name: "test", preferenceName: "fake.preference"});
await Assert.rejects(
PreferenceExperiments.start({
name: "different",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "default",
}),
"start threw an error due to an active experiment for the given preference",
);
}));
decorate_task(
withMockExperiments,
async function(experiments) {
experiments.test = experimentFactory({name: "test", preferenceName: "fake.preference"});
await Assert.rejects(
PreferenceExperiments.start({
name: "different",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "default",
}),
"start threw an error due to an active experiment for the given preference",
);
}
);
// start should throw if an invalid preferenceBranchType is given
add_task(withMockExperiments(async function() {
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "invalid",
}),
"start threw an error due to an invalid preference branch type",
);
}));
decorate_task(
withMockExperiments,
async function() {
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "value",
preferenceType: "string",
preferenceBranchType: "invalid",
}),
"start threw an error due to an invalid preference branch type",
);
}
);
// start should save experiment data, modify the preference, and register a
// watcher.
@@ -139,217 +152,254 @@ decorate_task(
);
// start should modify the user preference for the user branch type
add_task(withMockExperiments(withMockPreferences(async function(experiments, mockPreferences) {
const startObserver = sinon.stub(PreferenceExperiments, "startObserver");
mockPreferences.set("fake.preference", "oldvalue", "user");
mockPreferences.set("fake.preference", "olddefaultvalue", "default");
await PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "newvalue",
preferenceType: "string",
preferenceBranchType: "user",
});
ok(
startObserver.calledWith("test", "fake.preference", "string", "newvalue"),
"start registered an observer",
);
const expectedExperiment = {
name: "test",
branch: "branch",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "newvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
preferenceBranchType: "user",
};
const experiment = {};
Object.keys(expectedExperiment).forEach(key => experiment[key] = experiments.test[key]);
Assert.deepEqual(experiment, expectedExperiment, "start saved the experiment");
Assert.notEqual(
DefaultPreferences.get("fake.preference"),
"newvalue",
"start did not modify the default preference",
);
is(Preferences.get("fake.preference"), "newvalue", "start modified the user preference");
startObserver.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "startObserver"),
async function(experiments, mockPreferences, startObserver) {
mockPreferences.set("fake.preference", "olddefaultvalue", "default");
mockPreferences.set("fake.preference", "oldvalue", "user");
await PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.preference",
preferenceValue: "newvalue",
preferenceType: "string",
preferenceBranchType: "user",
});
ok(
startObserver.calledWith("test", "fake.preference", "string", "newvalue"),
"start registered an observer",
);
const expectedExperiment = {
name: "test",
branch: "branch",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "newvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
preferenceBranchType: "user",
};
const experiment = {};
Object.keys(expectedExperiment).forEach(key => experiment[key] = experiments.test[key]);
Assert.deepEqual(experiment, expectedExperiment, "start saved the experiment");
Assert.notEqual(
DefaultPreferences.get("fake.preference"),
"newvalue",
"start did not modify the default preference",
);
is(Preferences.get("fake.preference"), "newvalue", "start modified the user preference");
}
);
// start should detect if a new preference value type matches the previous value type
add_task(withMockPreferences(async function(mockPreferences) {
mockPreferences.set("fake.type_preference", "oldvalue");
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.type_preference",
preferenceValue: 12345,
preferenceType: "integer",
}),
"start threw error for incompatible preference type"
);
}));
// start should detect if a new preference value type matches the previous value type
decorate_task(
withMockPreferences,
async function(mockPreferences) {
mockPreferences.set("fake.type_preference", "oldvalue");
await Assert.rejects(
PreferenceExperiments.start({
name: "test",
branch: "branch",
preferenceName: "fake.type_preference",
preferenceBranchType: "user",
preferenceValue: 12345,
preferenceType: "integer",
}),
"start threw error for incompatible preference type"
);
}
);
// startObserver should throw if an observer for the experiment is already
// active.
add_task(withMockExperiments(async function() {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "newvalue");
Assert.throws(
() => PreferenceExperiments.startObserver("test", "another.fake", "string", "othervalue"),
"startObserver threw due to a conflicting active observer",
);
PreferenceExperiments.stopAllObservers();
}));
decorate_task(
withMockExperiments,
async function() {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "newvalue");
Assert.throws(
() => PreferenceExperiments.startObserver("test", "another.fake", "string", "othervalue"),
"startObserver threw due to a conflicting active observer",
);
PreferenceExperiments.stopAllObservers();
}
);
// startObserver should register an observer that calls stop when a preference
// changes from its experimental value.
add_task(withMockExperiments(withMockPreferences(async function(mockExperiments, mockPreferences) {
const tests = [
["string", "startvalue", "experimentvalue", "newvalue"],
["boolean", false, true, false],
["integer", 1, 2, 42],
];
for (const [type, startvalue, experimentvalue, newvalue] of tests) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference" + type, startvalue);
// NOTE: startObserver does not modify the pref
PreferenceExperiments.startObserver("test" + type, "fake.preference" + type, type, experimentvalue);
// Setting it to the experimental value should not trigger the call.
Preferences.set("fake.preference" + type, experimentvalue);
ok(!stop.called, "Changing to the experimental pref value did not trigger the observer");
// Setting it to something different should trigger the call.
Preferences.set("fake.preference" + type, newvalue);
ok(stop.called, "Changing to a different value triggered the observer");
PreferenceExperiments.stopAllObservers();
stop.restore();
}
})));
decorate_task(
withMockExperiments,
withMockPreferences,
async function(mockExperiments, mockPreferences) {
const tests = [
["string", "startvalue", "experimentvalue", "newvalue"],
["boolean", false, true, false],
["integer", 1, 2, 42],
];
for (const [type, startvalue, experimentvalue, newvalue] of tests) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference" + type, startvalue);
// NOTE: startObserver does not modify the pref
PreferenceExperiments.startObserver("test" + type, "fake.preference" + type, type, experimentvalue);
// Setting it to the experimental value should not trigger the call.
mockPreferences.set("fake.preference" + type, experimentvalue);
ok(!stop.called, "Changing to the experimental pref value did not trigger the observer");
// Setting it to something different should trigger the call.
mockPreferences.set("fake.preference" + type, newvalue);
ok(stop.called, "Changing to a different value triggered the observer");
PreferenceExperiments.stopAllObservers();
stop.restore();
}
}
);
add_task(withMockExperiments(async function testHasObserver() {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentValue");
ok(await PreferenceExperiments.hasObserver("test"), "hasObserver detects active observers");
ok(
!(await PreferenceExperiments.hasObserver("missing")),
"hasObserver doesn't detect inactive observers",
);
PreferenceExperiments.stopAllObservers();
}));
decorate_task(
withMockExperiments,
async function testHasObserver() {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentValue");
ok(await PreferenceExperiments.hasObserver("test"), "hasObserver should detect active observers");
ok(
!(await PreferenceExperiments.hasObserver("missing")),
"hasObserver shouldn't detect inactive observers",
);
PreferenceExperiments.stopAllObservers();
}
);
// stopObserver should throw if there is no observer active for it to stop.
add_task(withMockExperiments(async function() {
Assert.throws(
() => PreferenceExperiments.stopObserver("neveractive", "another.fake", "othervalue"),
"stopObserver threw because there was not matching active observer",
);
}));
// stopObserver should throw if there is no observer active for it to stop.
decorate_task(
withMockExperiments,
async function() {
Assert.throws(
() => PreferenceExperiments.stopObserver("neveractive", "another.fake", "othervalue"),
"stopObserver threw because there was not matching active observer",
);
}
);
// stopObserver should cancel an active observer.
add_task(withMockExperiments(withMockPreferences(async function(mockExperiments, mockPreferences) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference", "startvalue");
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.stopObserver("test");
// Setting the preference now that the observer is stopped should not call
// stop.
Preferences.set("fake.preference", "newvalue");
ok(!stop.called, "stopObserver successfully removed the observer");
// Now that the observer is stopped, start should be able to start a new one
// without throwing.
try {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
} catch (err) {
ok(false, "startObserver did not throw an error for an observer that was already stopped");
}
PreferenceExperiments.stopAllObservers();
stop.restore();
})));
// stopObserver should cancel an active observer.
decorate_task(
withMockExperiments,
withMockPreferences,
async function(mockExperiments, mockPreferences) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference", "startvalue");
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.stopObserver("test");
// Setting the preference now that the observer is stopped should not call
// stop.
mockPreferences.set("fake.preference", "newvalue");
ok(!stop.called, "stopObserver successfully removed the observer");
// Now that the observer is stopped, start should be able to start a new one
// without throwing.
try {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
} catch (err) {
ok(false, "startObserver did not throw an error for an observer that was already stopped");
}
PreferenceExperiments.stopAllObservers();
stop.restore();
}
);
// stopAllObservers
add_task(withMockExperiments(withMockPreferences(async function(mockExperiments, mockPreferences) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference", "startvalue");
mockPreferences.set("other.fake.preference", "startvalue");
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.startObserver("test2", "other.fake.preference", "string", "experimentvalue");
PreferenceExperiments.stopAllObservers();
// Setting the preference now that the observers are stopped should not call
// stop.
Preferences.set("fake.preference", "newvalue");
Preferences.set("other.fake.preference", "newvalue");
ok(!stop.called, "stopAllObservers successfully removed all observers");
// Now that the observers are stopped, start should be able to start new
// observers without throwing.
try {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.startObserver("test2", "other.fake.preference", "string", "experimentvalue");
} catch (err) {
ok(false, "startObserver did not throw an error for an observer that was already stopped");
}
PreferenceExperiments.stopAllObservers();
stop.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
async function(mockExperiments, mockPreferences) {
const stop = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference", "startvalue");
mockPreferences.set("other.fake.preference", "startvalue");
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.startObserver("test2", "other.fake.preference", "string", "experimentvalue");
PreferenceExperiments.stopAllObservers();
// Setting the preference now that the observers are stopped should not call
// stop.
mockPreferences.set("fake.preference", "newvalue");
mockPreferences.set("other.fake.preference", "newvalue");
ok(!stop.called, "stopAllObservers successfully removed all observers");
// Now that the observers are stopped, start should be able to start new
// observers without throwing.
try {
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
PreferenceExperiments.startObserver("test2", "other.fake.preference", "string", "experimentvalue");
} catch (err) {
ok(false, "startObserver did not throw an error for an observer that was already stopped");
}
PreferenceExperiments.stopAllObservers();
stop.restore();
}
);
// markLastSeen should throw if it can't find a matching experiment
add_task(withMockExperiments(async function() {
await Assert.rejects(
PreferenceExperiments.markLastSeen("neveractive"),
"markLastSeen threw because there was not a matching experiment",
);
}));
decorate_task(
withMockExperiments,
async function() {
await Assert.rejects(
PreferenceExperiments.markLastSeen("neveractive"),
"markLastSeen threw because there was not a matching experiment",
);
}
);
// markLastSeen should update the lastSeen date
add_task(withMockExperiments(async function(experiments) {
const oldDate = new Date(1988, 10, 1).toJSON();
experiments.test = experimentFactory({name: "test", lastSeen: oldDate});
await PreferenceExperiments.markLastSeen("test");
Assert.notEqual(
experiments.test.lastSeen,
oldDate,
"markLastSeen updated the experiment lastSeen date",
);
}));
decorate_task(
withMockExperiments,
async function(experiments) {
const oldDate = new Date(1988, 10, 1).toJSON();
experiments.test = experimentFactory({name: "test", lastSeen: oldDate});
await PreferenceExperiments.markLastSeen("test");
Assert.notEqual(
experiments.test.lastSeen,
oldDate,
"markLastSeen updated the experiment lastSeen date",
);
}
);
// stop should throw if an experiment with the given name doesn't exist
add_task(withMockExperiments(async function() {
await Assert.rejects(
PreferenceExperiments.stop("test"),
"stop threw an error because there are no experiments with the given name",
);
}));
decorate_task(
withMockExperiments,
async function() {
await Assert.rejects(
PreferenceExperiments.stop("test"),
"stop threw an error because there are no experiments with the given name",
);
}
);
// stop should throw if the experiment is already expired
add_task(withMockExperiments(async function(experiments) {
experiments.test = experimentFactory({name: "test", expired: true});
await Assert.rejects(
PreferenceExperiments.stop("test"),
"stop threw an error because the experiment was already expired",
);
}));
decorate_task(
withMockExperiments,
async function(experiments) {
experiments.test = experimentFactory({name: "test", expired: true});
await Assert.rejects(
PreferenceExperiments.stop("test"),
"stop threw an error because the experiment was already expired",
);
}
);
// stop should mark the experiment as expired, stop its observer, and revert the
// preference value.
@@ -358,6 +408,7 @@ decorate_task(
withMockPreferences,
withSpy(PreferenceExperiments, "stopObserver"),
async function testStop(experiments, mockPreferences, stopObserverSpy) {
is(Preferences.get("fake.preference"), null, "preference should start unset");
mockPreferences.set(`${startupPrefs}.fake.preference`, "experimentvalue", "user");
mockPreferences.set("fake.preference", "experimentvalue", "default");
experiments.test = experimentFactory({
@@ -389,158 +440,187 @@ decorate_task(
);
// stop should also support user pref experiments
add_task(withMockExperiments(withMockPreferences(async function(experiments, mockPreferences) {
const stopObserver = sinon.stub(PreferenceExperiments, "stopObserver");
const hasObserver = sinon.stub(PreferenceExperiments, "hasObserver");
hasObserver.returns(true);
mockPreferences.set("fake.preference", "experimentvalue", "user");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
preferenceBranchType: "user",
});
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
await PreferenceExperiments.stop("test");
ok(stopObserver.calledWith("test"), "stop removed an observer");
is(experiments.test.expired, true, "stop marked the experiment as expired");
is(
Preferences.get("fake.preference"),
"oldvalue",
"stop reverted the preference to its previous value",
);
stopObserver.restore();
PreferenceExperiments.stopAllObservers();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stopObserver"),
withStub(PreferenceExperiments, "hasObserver"),
async function testStopUserPrefs(experiments, mockPreferences, stopObserver, hasObserver) {
hasObserver.returns(true);
mockPreferences.set("fake.preference", "experimentvalue", "user");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
preferenceBranchType: "user",
});
PreferenceExperiments.startObserver("test", "fake.preference", "string", "experimentvalue");
await PreferenceExperiments.stop("test");
ok(stopObserver.calledWith("test"), "stop removed an observer");
is(experiments.test.expired, true, "stop marked the experiment as expired");
is(
Preferences.get("fake.preference"),
"oldvalue",
"stop reverted the preference to its previous value",
);
stopObserver.restore();
PreferenceExperiments.stopAllObservers();
}
);
// stop should remove a preference that had no value prior to an experiment for user prefs
add_task(withMockExperiments(withMockPreferences(async function(experiments, mockPreferences) {
const stopObserver = sinon.stub(PreferenceExperiments, "stopObserver");
mockPreferences.set("fake.preference", "experimentvalue", "user");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: null,
preferenceBranchType: "user",
});
await PreferenceExperiments.stop("test");
ok(
!Preferences.isSet("fake.preference"),
"stop removed the preference that had no value prior to the experiment",
);
stopObserver.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
async function(experiments, mockPreferences) {
const stopObserver = sinon.stub(PreferenceExperiments, "stopObserver");
mockPreferences.set("fake.preference", "experimentvalue", "user");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: null,
preferenceBranchType: "user",
});
await PreferenceExperiments.stop("test");
ok(
!Preferences.isSet("fake.preference"),
"stop removed the preference that had no value prior to the experiment",
);
stopObserver.restore();
}
);
// stop should not modify a preference if resetValue is false
add_task(withMockExperiments(withMockPreferences(async function(experiments, mockPreferences) {
const stopObserver = sinon.stub(PreferenceExperiments, "stopObserver");
mockPreferences.set("fake.preference", "customvalue", "default");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
peferenceBranchType: "default",
});
await PreferenceExperiments.stop("test", false);
is(
DefaultPreferences.get("fake.preference"),
"customvalue",
"stop did not modify the preference",
);
stopObserver.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stopObserver"),
async function(experiments, mockPreferences, stopObserver) {
mockPreferences.set("fake.preference", "customvalue", "default");
experiments.test = experimentFactory({
name: "test",
expired: false,
preferenceName: "fake.preference",
preferenceValue: "experimentvalue",
preferenceType: "string",
previousPreferenceValue: "oldvalue",
peferenceBranchType: "default",
});
await PreferenceExperiments.stop("test", false);
is(
DefaultPreferences.get("fake.preference"),
"customvalue",
"stop did not modify the preference",
);
}
);
// get should throw if no experiment exists with the given name
add_task(withMockExperiments(async function() {
await Assert.rejects(
PreferenceExperiments.get("neverexisted"),
"get rejects if no experiment with the given name is found",
);
}));
decorate_task(
withMockExperiments,
async function() {
await Assert.rejects(
PreferenceExperiments.get("neverexisted"),
"get rejects if no experiment with the given name is found",
);
}
);
// get
add_task(withMockExperiments(async function(experiments) {
const experiment = experimentFactory({name: "test"});
experiments.test = experiment;
const fetchedExperiment = await PreferenceExperiments.get("test");
Assert.deepEqual(fetchedExperiment, experiment, "get fetches the correct experiment");
// Modifying the fetched experiment must not edit the data source.
fetchedExperiment.name = "othername";
is(experiments.test.name, "test", "get returns a copy of the experiment");
}));
decorate_task(
withMockExperiments,
async function(experiments) {
const experiment = experimentFactory({name: "test"});
experiments.test = experiment;
const fetchedExperiment = await PreferenceExperiments.get("test");
Assert.deepEqual(fetchedExperiment, experiment, "get fetches the correct experiment");
// Modifying the fetched experiment must not edit the data source.
fetchedExperiment.name = "othername";
is(experiments.test.name, "test", "get returns a copy of the experiment");
}
);
add_task(withMockExperiments(async function testGetAll(experiments) {
const experiment1 = experimentFactory({name: "experiment1"});
const experiment2 = experimentFactory({name: "experiment2", disabled: true});
experiments.experiment1 = experiment1;
experiments.experiment2 = experiment2;
const fetchedExperiments = await PreferenceExperiments.getAll();
is(fetchedExperiments.length, 2, "getAll returns a list of all stored experiments");
Assert.deepEqual(
fetchedExperiments.find(e => e.name === "experiment1"),
experiment1,
"getAll returns a list with the correct experiments",
);
const fetchedExperiment2 = fetchedExperiments.find(e => e.name === "experiment2");
Assert.deepEqual(
fetchedExperiment2,
experiment2,
"getAll returns a list with the correct experiments, including disabled ones",
);
fetchedExperiment2.name = "othername";
is(experiment2.name, "experiment2", "getAll returns copies of the experiments");
}));
// get all
decorate_task(
withMockExperiments,
async function testGetAll(experiments) {
const experiment1 = experimentFactory({name: "experiment1"});
const experiment2 = experimentFactory({name: "experiment2", disabled: true});
experiments.experiment1 = experiment1;
experiments.experiment2 = experiment2;
const fetchedExperiments = await PreferenceExperiments.getAll();
is(fetchedExperiments.length, 2, "getAll returns a list of all stored experiments");
Assert.deepEqual(
fetchedExperiments.find(e => e.name === "experiment1"),
experiment1,
"getAll returns a list with the correct experiments",
);
const fetchedExperiment2 = fetchedExperiments.find(e => e.name === "experiment2");
Assert.deepEqual(
fetchedExperiment2,
experiment2,
"getAll returns a list with the correct experiments, including disabled ones",
);
fetchedExperiment2.name = "othername";
is(experiment2.name, "experiment2", "getAll returns copies of the experiments");
}
);
add_task(withMockExperiments(withMockPreferences(async function testGetAllActive(experiments) {
experiments.active = experimentFactory({
name: "active",
expired: false,
});
experiments.inactive = experimentFactory({
name: "inactive",
expired: true,
});
const activeExperiments = await PreferenceExperiments.getAllActive();
Assert.deepEqual(
activeExperiments,
[experiments.active],
"getAllActive only returns active experiments",
);
activeExperiments[0].name = "newfakename";
Assert.notEqual(
experiments.active.name,
"newfakename",
"getAllActive returns copies of stored experiments",
);
})));
// get all active
decorate_task(
withMockExperiments,
withMockPreferences,
async function testGetAllActive(experiments) {
experiments.active = experimentFactory({
name: "active",
expired: false,
});
experiments.inactive = experimentFactory({
name: "inactive",
expired: true,
});
const activeExperiments = await PreferenceExperiments.getAllActive();
Assert.deepEqual(
activeExperiments,
[experiments.active],
"getAllActive only returns active experiments",
);
activeExperiments[0].name = "newfakename";
Assert.notEqual(
experiments.active.name,
"newfakename",
"getAllActive returns copies of stored experiments",
);
}
);
// has
add_task(withMockExperiments(async function(experiments) {
experiments.test = experimentFactory({name: "test"});
ok(await PreferenceExperiments.has("test"), "has returned true for a stored experiment");
ok(!(await PreferenceExperiments.has("missing")), "has returned false for a missing experiment");
}));
decorate_task(
withMockExperiments,
async function(experiments) {
experiments.test = experimentFactory({name: "test"});
ok(await PreferenceExperiments.has("test"), "has returned true for a stored experiment");
ok(!(await PreferenceExperiments.has("missing")), "has returned false for a missing experiment");
}
);
// init should register telemetry experiments
decorate_task(
@@ -639,10 +719,13 @@ decorate_task(
["test", "branch", {type: "normandy-pref-test"}],
"start() should register the experiment with the provided type",
);
// start sets the passed preference in a way that is hard to mock.
// Reset the preference so it doesn't interfere with other tests.
Services.prefs.getDefaultBranch("fake.preference").deleteBranch("");
},
);
// Experiments shouldn't be recorded by init() in telemetry if they are expired
decorate_task(
withMockExperiments,
@@ -655,41 +738,52 @@ decorate_task(
);
// Experiments should end if the preference has been changed when init() is called
add_task(withMockExperiments(withMockPreferences(async function testInitChanges(experiments, mockPreferences) {
const stopStub = sinon.stub(PreferenceExperiments, "stop");
mockPreferences.set("fake.preference", "experiment value", "default");
experiments.test = experimentFactory({
name: "test",
preferenceName: "fake.preference",
preferenceValue: "experiment value",
});
mockPreferences.set("fake.preference", "changed value");
await PreferenceExperiments.init();
ok(stopStub.calledWith("test"), "Experiment is stopped because value changed");
ok(Preferences.get("fake.preference"), "changed value", "Preference value was not changed");
stopStub.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "stop"),
async function testInitChanges(experiments, mockPreferences, stopStub) {
mockPreferences.set("fake.preference", "experiment value", "default");
experiments.test = experimentFactory({
name: "test",
preferenceName: "fake.preference",
preferenceValue: "experiment value",
});
mockPreferences.set("fake.preference", "changed value");
await PreferenceExperiments.init();
ok(stopStub.calledWith("test"), "Experiment is stopped because value changed");
is(Preferences.get("fake.preference"), "changed value", "Preference value was not changed");
}
);
// init should register an observer for experiments
add_task(withMockExperiments(withMockPreferences(async function testInitRegistersObserver(experiments, mockPreferences) {
const startObserver = sinon.stub(PreferenceExperiments, "startObserver");
mockPreferences.set("fake.preference", "experiment value", "default");
experiments.test = experimentFactory({
name: "test",
preferenceName: "fake.preference",
preferenceValue: "experiment value",
});
await PreferenceExperiments.init();
ok(
startObserver.calledWith("test", "fake.preference", "string", "experiment value"),
"init registered an observer",
);
startObserver.restore();
})));
decorate_task(
withMockExperiments,
withMockPreferences,
withStub(PreferenceExperiments, "startObserver"),
withStub(PreferenceExperiments, "stop"),
withStub(CleanupManager, "addCleanupHandler"),
async function testInitRegistersObserver(experiments, mockPreferences, startObserver, stop) {
stop.throws("Stop should not be called");
mockPreferences.set("fake.preference", "experiment value", "default");
is(Preferences.get("fake.preference"), "experiment value", "pref shouldn't have a user value");
experiments.test = experimentFactory({
name: "test",
preferenceName: "fake.preference",
preferenceValue: "experiment value",
});
await PreferenceExperiments.init();
ok(startObserver.calledOnce, "init should register an observer");
Assert.deepEqual(
startObserver.getCall(0).args,
["test", "fake.preference", "string", "experiment value"],
"init should register an observer with the right args",
);
}
);
// saveStartupPrefs
decorate_task(
withMockExperiments,
async function testSaveStartupPrefs(experiments) {
@@ -731,6 +825,7 @@ decorate_task(
},
);
// saveStartupPrefs errors for invalid pref type
decorate_task(
withMockExperiments,
async function testSaveStartupPrefsError(experiments) {

View file

@@ -66,34 +66,35 @@ add_task(async function checkFilter() {
ok(!(await RecipeRunner.checkFilter(recipe)), "The recipe is available in the filter context");
});
add_task(withMockNormandyApi(async function testClientClassificationCache() {
const getStub = sinon.stub(ClientEnvironment, "getClientClassification")
.returns(Promise.resolve(false));
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.api_url",
"https://example.com/selfsupport-dummy"],
]});
// When the experiment pref is false, eagerly call getClientClassification.
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.experiments.lazy_classify", false],
]});
ok(!getStub.called, "getClientClassification hasn't been called");
await RecipeRunner.run();
ok(getStub.called, "getClientClassification was called eagerly");
// When the experiment pref is true, do not eagerly call getClientClassification.
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.experiments.lazy_classify", true],
]});
getStub.reset();
ok(!getStub.called, "getClientClassification hasn't been called");
await RecipeRunner.run();
ok(!getStub.called, "getClientClassification was not called eagerly");
getStub.restore();
}));
decorate_task(
withMockNormandyApi,
withStub(ClientEnvironment, "getClientClassification"),
async function testClientClassificationCache(api, getStub) {
getStub.returns(Promise.resolve(false));
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.api_url",
"https://example.com/selfsupport-dummy"],
]});
// When the experiment pref is false, eagerly call getClientClassification.
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.experiments.lazy_classify", false],
]});
ok(!getStub.called, "getClientClassification hasn't been called");
await RecipeRunner.run();
ok(getStub.called, "getClientClassification was called eagerly");
// When the experiment pref is true, do not eagerly call getClientClassification.
await SpecialPowers.pushPrefEnv({set: [
["extensions.shield-recipe-client.experiments.lazy_classify", true],
]});
getStub.reset();
ok(!getStub.called, "getClientClassification hasn't been called");
await RecipeRunner.run();
ok(!getStub.called, "getClientClassification was not called eagerly");
}
);
/**
* Mocks RecipeRunner.loadActionSandboxManagers for testing run.
@@ -118,211 +119,223 @@ async function withMockActionSandboxManagers(actions, testFunction) {
}
}
add_task(withMockNormandyApi(async function testRun(mockApi) {
const closeSpy = sinon.spy(AddonStudies, "close");
const reportRunner = sinon.stub(Uptake, "reportRunner");
const reportAction = sinon.stub(Uptake, "reportAction");
const reportRecipe = sinon.stub(Uptake, "reportRecipe");
const matchAction = {name: "matchAction"};
const noMatchAction = {name: "noMatchAction"};
mockApi.actions = [matchAction, noMatchAction];
const matchRecipe = {id: "match", action: "matchAction", filter_expression: "true"};
const noMatchRecipe = {id: "noMatch", action: "noMatchAction", filter_expression: "false"};
const missingRecipe = {id: "missing", action: "missingAction", filter_expression: "true"};
mockApi.recipes = [matchRecipe, noMatchRecipe, missingRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const matchManager = managers.matchAction;
const noMatchManager = managers.noMatchAction;
await RecipeRunner.run();
// match should be called for preExecution, action, and postExecution
sinon.assert.calledWith(matchManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(matchManager.runAsyncCallback, "action", matchRecipe);
sinon.assert.calledWith(matchManager.runAsyncCallback, "postExecution");
// noMatch should be called for preExecution and postExecution, and skipped
// for action since the filter expression does not match.
sinon.assert.calledWith(noMatchManager.runAsyncCallback, "preExecution");
sinon.assert.neverCalledWith(noMatchManager.runAsyncCallback, "action", noMatchRecipe);
sinon.assert.calledWith(noMatchManager.runAsyncCallback, "postExecution");
// missing is never called at all due to no matching action/manager.
// Test uptake reporting
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_SUCCESS);
sinon.assert.calledWith(reportAction, "matchAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportAction, "noMatchAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportRecipe, "match", Uptake.RECIPE_SUCCESS);
sinon.assert.neverCalledWith(reportRecipe, "noMatch", Uptake.RECIPE_SUCCESS);
sinon.assert.calledWith(reportRecipe, "missing", Uptake.RECIPE_INVALID_ACTION);
});
// Ensure storage is closed after the run.
sinon.assert.calledOnce(closeSpy);
closeSpy.restore();
reportRunner.restore();
reportAction.restore();
reportRecipe.restore();
}));
decorate_task(
withMockNormandyApi,
withSpy(AddonStudies, "close"),
withStub(Uptake, "reportRunner"),
withStub(Uptake, "reportAction"),
withStub(Uptake, "reportRecipe"),
async function testRun(mockApi, closeSpy, reportRunner, reportAction, reportRecipe) {
const matchAction = {name: "matchAction"};
const noMatchAction = {name: "noMatchAction"};
mockApi.actions = [matchAction, noMatchAction];
const matchRecipe = {id: "match", action: "matchAction", filter_expression: "true"};
const noMatchRecipe = {id: "noMatch", action: "noMatchAction", filter_expression: "false"};
const missingRecipe = {id: "missing", action: "missingAction", filter_expression: "true"};
mockApi.recipes = [matchRecipe, noMatchRecipe, missingRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const matchManager = managers.matchAction;
const noMatchManager = managers.noMatchAction;
await RecipeRunner.run();
// match should be called for preExecution, action, and postExecution
sinon.assert.calledWith(matchManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(matchManager.runAsyncCallback, "action", matchRecipe);
sinon.assert.calledWith(matchManager.runAsyncCallback, "postExecution");
// noMatch should be called for preExecution and postExecution, and skipped
// for action since the filter expression does not match.
sinon.assert.calledWith(noMatchManager.runAsyncCallback, "preExecution");
sinon.assert.neverCalledWith(noMatchManager.runAsyncCallback, "action", noMatchRecipe);
sinon.assert.calledWith(noMatchManager.runAsyncCallback, "postExecution");
// missing is never called at all due to no matching action/manager.
// Test uptake reporting
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_SUCCESS);
sinon.assert.calledWith(reportAction, "matchAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportAction, "noMatchAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportRecipe, "match", Uptake.RECIPE_SUCCESS);
sinon.assert.neverCalledWith(reportRecipe, "noMatch", Uptake.RECIPE_SUCCESS);
sinon.assert.calledWith(reportRecipe, "missing", Uptake.RECIPE_INVALID_ACTION);
});
// Ensure storage is closed after the run.
sinon.assert.calledOnce(closeSpy);
}
);
add_task(withMockNormandyApi(async function testRunRecipeError(mockApi) {
const reportRecipe = sinon.stub(Uptake, "reportRecipe");
const action = {name: "action"};
mockApi.actions = [action];
const recipe = {id: "recipe", action: "action", filter_expression: "true"};
mockApi.recipes = [recipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const manager = managers.action;
manager.runAsyncCallback.callsFake(async callbackName => {
if (callbackName === "action") {
throw new Error("Action execution failure");
}
});
await RecipeRunner.run();
// Uptake should report that the recipe threw an exception
sinon.assert.calledWith(reportRecipe, "recipe", Uptake.RECIPE_EXECUTION_ERROR);
});
reportRecipe.restore();
}));
decorate_task(
withMockNormandyApi,
async function testRunRecipeError(mockApi) {
const reportRecipe = sinon.stub(Uptake, "reportRecipe");
const action = {name: "action"};
mockApi.actions = [action];
const recipe = {id: "recipe", action: "action", filter_expression: "true"};
mockApi.recipes = [recipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const manager = managers.action;
manager.runAsyncCallback.callsFake(async callbackName => {
if (callbackName === "action") {
throw new Error("Action execution failure");
}
});
await RecipeRunner.run();
// Uptake should report that the recipe threw an exception
sinon.assert.calledWith(reportRecipe, "recipe", Uptake.RECIPE_EXECUTION_ERROR);
});
reportRecipe.restore();
}
);
add_task(withMockNormandyApi(async function testRunFetchFail(mockApi) {
const closeSpy = sinon.spy(AddonStudies, "close");
const reportRunner = sinon.stub(Uptake, "reportRunner");
const action = {name: "action"};
mockApi.actions = [action];
mockApi.fetchRecipes.rejects(new Error("Signature not valid"));
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const manager = managers.action;
await RecipeRunner.run();
// If the recipe fetch failed, do not run anything.
sinon.assert.notCalled(manager.runAsyncCallback);
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_SERVER_ERROR);
// Test that network errors report a specific uptake error
reportRunner.reset();
mockApi.fetchRecipes.rejects(new Error("NetworkError: The system was down"));
await RecipeRunner.run();
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_NETWORK_ERROR);
// Test that signature issues report a specific uptake error
reportRunner.reset();
mockApi.fetchRecipes.rejects(new NormandyApi.InvalidSignatureError("Signature fail"));
await RecipeRunner.run();
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_INVALID_SIGNATURE);
});
// If the recipe fetch failed, we don't need to call close since nothing
// opened a connection in the first place.
sinon.assert.notCalled(closeSpy);
closeSpy.restore();
reportRunner.restore();
}));
decorate_task(
withMockNormandyApi,
async function testRunFetchFail(mockApi) {
const closeSpy = sinon.spy(AddonStudies, "close");
const reportRunner = sinon.stub(Uptake, "reportRunner");
const action = {name: "action"};
mockApi.actions = [action];
mockApi.fetchRecipes.rejects(new Error("Signature not valid"));
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const manager = managers.action;
await RecipeRunner.run();
// If the recipe fetch failed, do not run anything.
sinon.assert.notCalled(manager.runAsyncCallback);
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_SERVER_ERROR);
// Test that network errors report a specific uptake error
reportRunner.reset();
mockApi.fetchRecipes.rejects(new Error("NetworkError: The system was down"));
await RecipeRunner.run();
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_NETWORK_ERROR);
// Test that signature issues report a specific uptake error
reportRunner.reset();
mockApi.fetchRecipes.rejects(new NormandyApi.InvalidSignatureError("Signature fail"));
await RecipeRunner.run();
sinon.assert.calledWith(reportRunner, Uptake.RUNNER_INVALID_SIGNATURE);
});
// If the recipe fetch failed, we don't need to call close since nothing
// opened a connection in the first place.
sinon.assert.notCalled(closeSpy);
closeSpy.restore();
reportRunner.restore();
}
);
add_task(withMockNormandyApi(async function testRunPreExecutionFailure(mockApi) {
const closeSpy = sinon.spy(AddonStudies, "close");
const reportAction = sinon.stub(Uptake, "reportAction");
const reportRecipe = sinon.stub(Uptake, "reportRecipe");
const passAction = {name: "passAction"};
const failAction = {name: "failAction"};
mockApi.actions = [passAction, failAction];
const passRecipe = {id: "pass", action: "passAction", filter_expression: "true"};
const failRecipe = {id: "fail", action: "failAction", filter_expression: "true"};
mockApi.recipes = [passRecipe, failRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const passManager = managers.passAction;
const failManager = managers.failAction;
failManager.runAsyncCallback.returns(Promise.reject(new Error("oh no")));
await RecipeRunner.run();
// pass should be called for preExecution, action, and postExecution
sinon.assert.calledWith(passManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(passManager.runAsyncCallback, "action", passRecipe);
sinon.assert.calledWith(passManager.runAsyncCallback, "postExecution");
// fail should only be called for preExecution, since it fails during that
sinon.assert.calledWith(failManager.runAsyncCallback, "preExecution");
sinon.assert.neverCalledWith(failManager.runAsyncCallback, "action", failRecipe);
sinon.assert.neverCalledWith(failManager.runAsyncCallback, "postExecution");
sinon.assert.calledWith(reportAction, "passAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportAction, "failAction", Uptake.ACTION_PRE_EXECUTION_ERROR);
sinon.assert.calledWith(reportRecipe, "fail", Uptake.RECIPE_ACTION_DISABLED);
});
// Ensure storage is closed after the run, despite the failures.
sinon.assert.calledOnce(closeSpy);
closeSpy.restore();
reportAction.restore();
reportRecipe.restore();
}));
add_task(withMockNormandyApi(async function testRunPostExecutionFailure(mockApi) {
const reportAction = sinon.stub(Uptake, "reportAction");
const failAction = {name: "failAction"};
mockApi.actions = [failAction];
const failRecipe = {action: "failAction", filter_expression: "true"};
mockApi.recipes = [failRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const failManager = managers.failAction;
failManager.runAsyncCallback.callsFake(async callbackName => {
if (callbackName === "postExecution") {
throw new Error("postExecution failure");
}
});
await RecipeRunner.run();
// fail should be called for every stage
sinon.assert.calledWith(failManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(failManager.runAsyncCallback, "action", failRecipe);
sinon.assert.calledWith(failManager.runAsyncCallback, "postExecution");
// Uptake should report a post-execution error
sinon.assert.calledWith(reportAction, "failAction", Uptake.ACTION_POST_EXECUTION_ERROR);
});
reportAction.restore();
}));
add_task(withMockNormandyApi(async function testLoadActionSandboxManagers(mockApi) {
mockApi.actions = [
{name: "normalAction"},
{name: "missingImpl"},
];
mockApi.implementations.normalAction = "window.scriptRan = true";
const managers = await RecipeRunner.loadActionSandboxManagers();
ok("normalAction" in managers, "Actions with implementations have managers");
ok(!("missingImpl" in managers), "Actions without implementations are skipped");
const normalManager = managers.normalAction;
ok(
await normalManager.evalInSandbox("window.scriptRan"),
"Implementations are run in the sandbox",
);
}));
decorate_task(
withMockNormandyApi,
async function testRunPreExecutionFailure(mockApi) {
const closeSpy = sinon.spy(AddonStudies, "close");
const reportAction = sinon.stub(Uptake, "reportAction");
const reportRecipe = sinon.stub(Uptake, "reportRecipe");
const passAction = {name: "passAction"};
const failAction = {name: "failAction"};
mockApi.actions = [passAction, failAction];
const passRecipe = {id: "pass", action: "passAction", filter_expression: "true"};
const failRecipe = {id: "fail", action: "failAction", filter_expression: "true"};
mockApi.recipes = [passRecipe, failRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const passManager = managers.passAction;
const failManager = managers.failAction;
failManager.runAsyncCallback.returns(Promise.reject(new Error("oh no")));
await RecipeRunner.run();
// pass should be called for preExecution, action, and postExecution
sinon.assert.calledWith(passManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(passManager.runAsyncCallback, "action", passRecipe);
sinon.assert.calledWith(passManager.runAsyncCallback, "postExecution");
// fail should only be called for preExecution, since it fails during that
sinon.assert.calledWith(failManager.runAsyncCallback, "preExecution");
sinon.assert.neverCalledWith(failManager.runAsyncCallback, "action", failRecipe);
sinon.assert.neverCalledWith(failManager.runAsyncCallback, "postExecution");
sinon.assert.calledWith(reportAction, "passAction", Uptake.ACTION_SUCCESS);
sinon.assert.calledWith(reportAction, "failAction", Uptake.ACTION_PRE_EXECUTION_ERROR);
sinon.assert.calledWith(reportRecipe, "fail", Uptake.RECIPE_ACTION_DISABLED);
});
// Ensure storage is closed after the run, despite the failures.
sinon.assert.calledOnce(closeSpy);
closeSpy.restore();
reportAction.restore();
reportRecipe.restore();
}
);
decorate_task(
withMockNormandyApi,
async function testRunPostExecutionFailure(mockApi) {
const reportAction = sinon.stub(Uptake, "reportAction");
const failAction = {name: "failAction"};
mockApi.actions = [failAction];
const failRecipe = {action: "failAction", filter_expression: "true"};
mockApi.recipes = [failRecipe];
await withMockActionSandboxManagers(mockApi.actions, async managers => {
const failManager = managers.failAction;
failManager.runAsyncCallback.callsFake(async callbackName => {
if (callbackName === "postExecution") {
throw new Error("postExecution failure");
}
});
await RecipeRunner.run();
// fail should be called for every stage
sinon.assert.calledWith(failManager.runAsyncCallback, "preExecution");
sinon.assert.calledWith(failManager.runAsyncCallback, "action", failRecipe);
sinon.assert.calledWith(failManager.runAsyncCallback, "postExecution");
// Uptake should report a post-execution error
sinon.assert.calledWith(reportAction, "failAction", Uptake.ACTION_POST_EXECUTION_ERROR);
});
reportAction.restore();
}
);
decorate_task(
withMockNormandyApi,
async function testLoadActionSandboxManagers(mockApi) {
mockApi.actions = [
{name: "normalAction"},
{name: "missingImpl"},
];
mockApi.implementations.normalAction = "window.scriptRan = true";
const managers = await RecipeRunner.loadActionSandboxManagers();
ok("normalAction" in managers, "Actions with implementations have managers");
ok(!("missingImpl" in managers), "Actions without implementations are skipped");
const normalManager = managers.normalAction;
ok(
await normalManager.evalInSandbox("window.scriptRan"),
"Implementations are run in the sandbox",
);
}
);
// test init() in dev mode
decorate_task(
@@ -402,8 +415,9 @@ decorate_task(
withStub(RecipeRunner, "enable"),
withStub(RecipeRunner, "disable"),
withStub(CleanupManager, "addCleanupHandler"),
withStub(AddonStudies, "stop"),
async function testPrefWatching(runStub, enableStub, disableStub, addCleanupHandlerStub) {
async function testPrefWatching(runStub, enableStub, disableStub, addCleanupHandlerStub, stopStub) {
await RecipeRunner.init();
is(enableStub.callCount, 1, "Enable should be called initially");
is(disableStub.callCount, 0, "Disable should not be called initially");

View file

@@ -6,14 +6,13 @@ Cu.import("resource://shield-recipe-client/lib/AddonStudies.jsm", this);
const OPT_OUT_PREF = "app.shield.optoutstudies.enabled";
decorate_task(
withPrefEnv({
set: [[OPT_OUT_PREF, true]],
}),
withMockPreferences,
AddonStudies.withStudies([
studyFactory({active: true}),
studyFactory({active: true}),
]),
async function testDisableStudiesWhenOptOutDisabled([study1, study2]) {
async function testDisableStudiesWhenOptOutDisabled(mockPreferences, [study1, study2]) {
mockPreferences.set(OPT_OUT_PREF, true);
const observers = [
studyEndObserved(study1.recipeId),
studyEndObserved(study2.recipeId),

View file

@@ -31,12 +31,10 @@ const experimentPref3 = "test.initExperimentPrefs3";
const experimentPref4 = "test.initExperimentPrefs4";
decorate_task(
withPrefEnv({
clear: [[initPref1], [initPref2], [initPref3]],
}),
withBootstrap,
async function testInitShieldPrefs(Bootstrap) {
const defaultBranch = Services.prefs.getDefaultBranch("");
const prefDefaults = {
[initPref1]: true,
[initPref2]: 2,
@@ -74,6 +72,8 @@ decorate_task(
`Pref ${pref} doesn't have a user value after being initialized.`,
);
}
defaultBranch.deleteBranch("test.");
},
);

View file

@@ -184,11 +184,27 @@ class MockPreferences {
for (const [branchName, values] of Object.entries(this.oldValues)) {
const preferenceBranch = preferenceBranches[branchName];
for (const [name, {oldValue, existed}] of Object.entries(values)) {
const before = preferenceBranch.get(name);
if (before === oldValue) {
continue;
}
if (existed) {
preferenceBranch.set(name, oldValue);
} else if (branchName === "default") {
Services.prefs.getDefaultBranch(name).deleteBranch("");
} else {
preferenceBranch.reset(name);
}
const after = preferenceBranch.get(name);
if (before === after && before !== undefined) {
throw new Error(
`Couldn't reset pref "${name}" to "${oldValue}" on "${branchName}" branch ` +
`(value stayed "${before}", did ${existed ? "" : "not "}exist)`
);
}
}
}
}