path: root/toolkit/components/normandy/test
author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
commit     36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree       105e8c98ddea1c1e4784a60a5a6410fa416be2de /toolkit/components/normandy/test
parent     Initial commit. (diff)
Adding upstream version 115.7.0esr. (upstream/115.7.0esr)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'toolkit/components/normandy/test')
-rw-r--r--  toolkit/components/normandy/test/.eslintrc.js | 7
-rw-r--r--  toolkit/components/normandy/test/NormandyTestUtils.sys.mjs | 349
-rw-r--r--  toolkit/components/normandy/test/browser/action_server.sjs | 10
-rw-r--r--  toolkit/components/normandy/test/browser/addons/normandydriver-a-1.0/manifest.json | 11
-rw-r--r--  toolkit/components/normandy/test/browser/addons/normandydriver-a-2.0/manifest.json | 11
-rw-r--r--  toolkit/components/normandy/test/browser/addons/normandydriver-b-1.0/manifest.json | 11
-rw-r--r--  toolkit/components/normandy/test/browser/browser.ini | 49
-rw-r--r--  toolkit/components/normandy/test/browser/browser_ActionsManager.js | 68
-rw-r--r--  toolkit/components/normandy/test/browser/browser_AddonRollouts.js | 141
-rw-r--r--  toolkit/components/normandy/test/browser/browser_AddonStudies.js | 300
-rw-r--r--  toolkit/components/normandy/test/browser/browser_BaseAction.js | 349
-rw-r--r--  toolkit/components/normandy/test/browser/browser_CleanupManager.js | 26
-rw-r--r--  toolkit/components/normandy/test/browser/browser_ClientEnvironment.js | 274
-rw-r--r--  toolkit/components/normandy/test/browser/browser_EventEmitter.js | 110
-rw-r--r--  toolkit/components/normandy/test/browser/browser_Heartbeat.js | 262
-rw-r--r--  toolkit/components/normandy/test/browser/browser_LegacyHeartbeat.js | 88
-rw-r--r--  toolkit/components/normandy/test/browser/browser_LogManager.js | 27
-rw-r--r--  toolkit/components/normandy/test/browser/browser_Normandy.js | 386
-rw-r--r--  toolkit/components/normandy/test/browser/browser_NormandyAddonManager.js | 189
-rw-r--r--  toolkit/components/normandy/test/browser/browser_NormandyMigrations.js | 106
-rw-r--r--  toolkit/components/normandy/test/browser/browser_PreferenceExperiments.js | 2205
-rw-r--r--  toolkit/components/normandy/test/browser/browser_PreferenceRollouts.js | 316
-rw-r--r--  toolkit/components/normandy/test/browser/browser_RecipeRunner.js | 874
-rw-r--r--  toolkit/components/normandy/test/browser/browser_ShieldPreferences.js | 91
-rw-r--r--  toolkit/components/normandy/test/browser/browser_Storage.js | 43
-rw-r--r--  toolkit/components/normandy/test/browser/browser_Uptake.js | 15
-rw-r--r--  toolkit/components/normandy/test/browser/browser_about_preferences.js | 106
-rw-r--r--  toolkit/components/normandy/test/browser/browser_about_studies.js | 825
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_AddonRollbackAction.js | 246
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_AddonRolloutAction.js | 539
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_BranchedAddonStudyAction.js | 1662
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_ConsoleLogAction.js | 62
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_MessagingExperimentAction.js | 67
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_PreferenceExperimentAction.js | 914
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_PreferenceRollbackAction.js | 355
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_PreferenceRolloutAction.js | 725
-rw-r--r--  toolkit/components/normandy/test/browser/browser_actions_ShowHeartbeatAction.js | 377
-rw-r--r--  toolkit/components/normandy/test/browser/head.js | 642
-rw-r--r--  toolkit/components/normandy/test/browser/moz.build | 27
-rw-r--r--  toolkit/components/normandy/test/create_xpi.py | 12
-rw-r--r--  toolkit/components/normandy/test/unit/cookie_server.sjs | 12
-rw-r--r--  toolkit/components/normandy/test/unit/echo_server.sjs | 21
-rw-r--r--  toolkit/components/normandy/test/unit/head_xpc.js | 5
-rw-r--r--  toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/index.json | 4
-rw-r--r--  toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/recipe/signed/index.json | 24
-rw-r--r--  toolkit/components/normandy/test/unit/invalid_recipe_signature_api/normandy.content-signature.mozilla.org-20210705.dev.chain | 123
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/api/v1/classify_client/index.json | 4
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/api/v1/extension/1/index.json | 9
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/api/v1/extension/index.json | 0
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/api/v1/index.json | 5
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/api/v1/recipe/signed/index.json | 24
-rw-r--r--  toolkit/components/normandy/test/unit/mock_api/normandy.content-signature.mozilla.org-20210705.dev.chain | 123
-rw-r--r--  toolkit/components/normandy/test/unit/query_server.sjs | 34
-rw-r--r--  toolkit/components/normandy/test/unit/test_Normandy.js | 95
-rw-r--r--  toolkit/components/normandy/test/unit/test_NormandyApi.js | 257
-rw-r--r--  toolkit/components/normandy/test/unit/test_PrefUtils.js | 223
-rw-r--r--  toolkit/components/normandy/test/unit/test_RecipeRunner.js | 34
-rw-r--r--  toolkit/components/normandy/test/unit/test_addon_unenroll.js | 310
-rw-r--r--  toolkit/components/normandy/test/unit/utils.js | 135
-rw-r--r--  toolkit/components/normandy/test/unit/xpcshell.ini | 17
60 files changed, 14336 insertions, 0 deletions
diff --git a/toolkit/components/normandy/test/.eslintrc.js b/toolkit/components/normandy/test/.eslintrc.js
new file mode 100644
index 0000000000..0ee96759d4
--- /dev/null
+++ b/toolkit/components/normandy/test/.eslintrc.js
@@ -0,0 +1,7 @@
+"use strict";
+
+module.exports = {
+ rules: {
+ "require-yield": 0,
+ },
+};
diff --git a/toolkit/components/normandy/test/NormandyTestUtils.sys.mjs b/toolkit/components/normandy/test/NormandyTestUtils.sys.mjs
new file mode 100644
index 0000000000..0a635a4eae
--- /dev/null
+++ b/toolkit/components/normandy/test/NormandyTestUtils.sys.mjs
@@ -0,0 +1,349 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import { Preferences } from "resource://gre/modules/Preferences.sys.mjs";
+import { AddonStudies } from "resource://normandy/lib/AddonStudies.sys.mjs";
+import { NormandyUtils } from "resource://normandy/lib/NormandyUtils.sys.mjs";
+import { RecipeRunner } from "resource://normandy/lib/RecipeRunner.sys.mjs";
+import { sinon } from "resource://testing-common/Sinon.sys.mjs";
+
+const FIXTURE_ADDON_ID = "normandydriver-a@example.com";
+const UUID_REGEX =
+ /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i;
+
+// Factory IDs
+let _addonStudyFactoryId = 0;
+let _preferenceStudyFactoryId = 0;
+let _preferenceRolloutFactoryId = 0;
+
+let testGlobals = {};
+
+const preferenceBranches = {
+ user: Preferences,
+ default: new Preferences({ defaultBranch: true }),
+};
+
+export const NormandyTestUtils = {
+ init({ add_task, Assert } = {}) {
+ testGlobals.add_task = add_task;
+ testGlobals.Assert = Assert;
+ },
+
+ factories: {
+ addonStudyFactory(attrs = {}) {
+ for (const key of ["name", "description"]) {
+ if (attrs && attrs[key]) {
+ throw new Error(
+ `${key} is no longer a valid key for addon studies, please update to v2 study schema`
+ );
+ }
+ }
+
+ // Generate a slug from userFacingName
+ let recipeId = _addonStudyFactoryId++;
+ let { userFacingName = `Test study ${recipeId}`, slug } = attrs;
+ delete attrs.slug;
+ if (userFacingName && !slug) {
+ slug = userFacingName.replace(" ", "-").toLowerCase();
+ }
+
+ return Object.assign(
+ {
+ recipeId,
+ slug,
+ userFacingName: "Test study",
+ userFacingDescription: "test description",
+ branch: AddonStudies.NO_BRANCHES_MARKER,
+ active: true,
+ addonId: FIXTURE_ADDON_ID,
+ addonUrl: "http://test/addon.xpi",
+ addonVersion: "1.0.0",
+ studyStartDate: new Date(),
+ studyEndDate: null,
+ extensionApiId: 1,
+ extensionHash:
+ "ade1c14196ec4fe0aa0a6ba40ac433d7c8d1ec985581a8a94d43dc58991b5171",
+ extensionHashAlgorithm: "sha256",
+ enrollmentId: NormandyUtils.generateUuid(),
+ temporaryErrorDeadline: null,
+ },
+ attrs
+ );
+ },
+
+ branchedAddonStudyFactory(attrs = {}) {
+ return NormandyTestUtils.factories.addonStudyFactory(
+ Object.assign(
+ {
+ branch: "a",
+ },
+ attrs
+ )
+ );
+ },
+
+ preferenceStudyFactory(attrs = {}) {
+ const defaultPref = {
+ "test.study": {},
+ };
+ const defaultPrefInfo = {
+ preferenceValue: false,
+ preferenceType: "boolean",
+ previousPreferenceValue: null,
+ preferenceBranchType: "default",
+ overridden: false,
+ };
+ const preferences = {};
+ for (const [prefName, prefInfo] of Object.entries(
+ attrs.preferences || defaultPref
+ )) {
+ preferences[prefName] = { ...defaultPrefInfo, ...prefInfo };
+ }
+
+ // Generate a slug from userFacingName
+ let {
+ userFacingName = `Test study ${_preferenceStudyFactoryId++}`,
+ slug,
+ } = attrs;
+ delete attrs.slug;
+ if (userFacingName && !slug) {
+ slug = userFacingName.replace(" ", "-").toLowerCase();
+ }
+
+ return Object.assign(
+ {
+ userFacingName,
+ userFacingDescription: `${userFacingName} description`,
+ slug,
+ branch: "control",
+ expired: false,
+ lastSeen: new Date().toJSON(),
+ experimentType: "exp",
+ enrollmentId: NormandyUtils.generateUuid(),
+ actionName: "PreferenceExperimentAction",
+ },
+ attrs,
+ {
+ preferences,
+ }
+ );
+ },
+
+ preferenceRolloutFactory(attrs = {}) {
+ const defaultPrefInfo = {
+ preferenceName: "test.rollout.{}",
+ value: true,
+ previousValue: false,
+ };
+ const preferences = (attrs.preferences ?? [{}]).map((override, idx) => ({
+ ...defaultPrefInfo,
+ preferenceName: defaultPrefInfo.preferenceName.replace(
+ "{}",
+ (idx + 1).toString()
+ ),
+ ...override,
+ }));
+
+ return Object.assign(
+ {
+ slug: `test-rollout-${_preferenceRolloutFactoryId++}`,
+ state: "active",
+ enrollmentId: NormandyUtils.generateUuid(),
+ },
+ attrs,
+ {
+ preferences,
+ }
+ );
+ },
+ },
+
+ /**
+ * Combine a list of functions right to left. The rightmost function is passed
+ * to the preceding function as the argument; the result of this is passed to
+ * the next function until all are exhausted. For example, this:
+ *
+ * decorate(func1, func2, func3);
+ *
+ * is equivalent to this:
+ *
+ * func1(func2(func3));
+ */
+ decorate(...args) {
+ const funcs = Array.from(args);
+ let decorated = funcs.pop();
+ const origName = decorated.name;
+ funcs.reverse();
+ for (const func of funcs) {
+ decorated = func(decorated);
+ }
+ Object.defineProperty(decorated, "name", { value: origName });
+ return decorated;
+ },
+
+ /**
+ * Wrapper around add_task for declaring tests that use several with-style
+ * wrappers. The last argument should be your test function; all other arguments
+ * should be functions that accept a single test function argument.
+ *
+ * The arguments are combined using decorate and passed to add_task as a single
+ * test function.
+ *
+ * @param {[Function]} args
+ * @example
+ * decorate_task(
+ * withMockPreferences(),
+ * withMockNormandyApi(),
+ * async function myTest(mockPreferences, mockApi) {
+ * // Do a test
+ * }
+ * );
+ */
+ decorate_task(...args) {
+ return testGlobals.add_task(NormandyTestUtils.decorate(...args));
+ },
+
+ isUuid(s) {
+ return UUID_REGEX.test(s);
+ },
+
+ withMockRecipeCollection(recipes = []) {
+ return function wrapper(testFunc) {
+ return async function inner(args) {
+ let recipeIds = new Set();
+ for (const recipe of recipes) {
+ if (!recipe.id || recipeIds.has(recipe.id)) {
+ throw new Error(
+ "To use withMockRecipeCollection each recipe must have a unique ID"
+ );
+ }
+ recipeIds.add(recipe.id);
+ }
+
+ let db = await RecipeRunner._remoteSettingsClientForTesting.db;
+ await db.clear();
+ const fakeSig = { signature: "abc" };
+
+ for (const recipe of recipes) {
+ await db.create({
+ id: `recipe-${recipe.id}`,
+ recipe,
+ signature: fakeSig,
+ });
+ }
+
+ // last modified needs to be some positive integer
+ let lastModified = await db.getLastModified();
+ await db.importChanges({}, lastModified + 1);
+
+ const mockRecipeCollection = {
+ async addRecipes(newRecipes) {
+ for (const recipe of newRecipes) {
+            if (!recipe.id || recipeIds.has(recipe.id)) {
+ throw new Error(
+ "To use withMockRecipeCollection each recipe must have a unique ID"
+ );
+ }
+ }
+ db = await RecipeRunner._remoteSettingsClientForTesting.db;
+ for (const recipe of newRecipes) {
+ recipeIds.add(recipe.id);
+ await db.create({
+ id: `recipe-${recipe.id}`,
+ recipe,
+ signature: fakeSig,
+ });
+ }
+ lastModified = (await db.getLastModified()) || 0;
+ await db.importChanges({}, lastModified + 1);
+ },
+ };
+
+ try {
+ await testFunc({ ...args, mockRecipeCollection });
+ } finally {
+ db = await RecipeRunner._remoteSettingsClientForTesting.db;
+ await db.clear();
+ lastModified = await db.getLastModified();
+ await db.importChanges({}, lastModified + 1);
+ }
+ };
+ };
+ },
+
+ MockPreferences: class {
+ constructor() {
+ this.oldValues = { user: {}, default: {} };
+ }
+
+ set(name, value, branch = "user") {
+ this.preserve(name, branch);
+ preferenceBranches[branch].set(name, value);
+ }
+
+ preserve(name, branch) {
+ if (!(name in this.oldValues[branch])) {
+ this.oldValues[branch][name] = preferenceBranches[branch].get(
+ name,
+ undefined
+ );
+ }
+ }
+
+ cleanup() {
+ for (const [branchName, values] of Object.entries(this.oldValues)) {
+ const preferenceBranch = preferenceBranches[branchName];
+ for (const [name, value] of Object.entries(values)) {
+ if (value !== undefined) {
+ preferenceBranch.set(name, value);
+ } else {
+ preferenceBranch.reset(name);
+ }
+ }
+ }
+ }
+ },
+
+ withMockPreferences() {
+ return function (testFunction) {
+ return async function inner(args) {
+ const mockPreferences = new NormandyTestUtils.MockPreferences();
+ try {
+ await testFunction({ ...args, mockPreferences });
+ } finally {
+ mockPreferences.cleanup();
+ }
+ };
+ };
+ },
+
+ withStub(object, method, { returnValue, as = `${method}Stub` } = {}) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ const stub = sinon.stub(object, method);
+ if (returnValue) {
+ stub.returns(returnValue);
+ }
+ try {
+ await testFunction({ ...args, [as]: stub });
+ } finally {
+ stub.restore();
+ }
+ };
+ };
+ },
+
+ withSpy(object, method, { as = `${method}Spy` } = {}) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ const spy = sinon.spy(object, method);
+ try {
+ await testFunction({ ...args, [as]: spy });
+ } finally {
+ spy.restore();
+ }
+ };
+ };
+ },
+};
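
For context, here is a minimal sketch of how the helpers above are typically composed in a browser-chrome test. It is illustrative and not part of the upstream diff: the test name, pref name, and test body are placeholders, and it assumes head.js has already called NormandyTestUtils.init({ add_task, Assert }).

// Illustrative usage of decorate_task with the with-style wrappers above.
const { Uptake } = ChromeUtils.importESModule(
  "resource://normandy/lib/Uptake.sys.mjs"
);
const { decorate_task, withStub, withMockPreferences } = NormandyTestUtils;

decorate_task(
  withStub(Uptake, "reportRecipe"),
  withMockPreferences(),
  // All wrapper-provided values arrive in one args object; withStub's default
  // key is `${method}Stub`, so the stub is available as reportRecipeStub.
  async function exampleTest({ reportRecipeStub, mockPreferences }) {
    mockPreferences.set("app.normandy.dev_mode", true); // restored on cleanup
    // ... exercise the code under test ...
    Assert.equal(reportRecipeStub.callCount, 0, "nothing reported yet");
  }
);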
diff --git a/toolkit/components/normandy/test/browser/action_server.sjs b/toolkit/components/normandy/test/browser/action_server.sjs
new file mode 100644
index 0000000000..5d6a3e6bb0
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/action_server.sjs
@@ -0,0 +1,10 @@
+// Returns JS for an action, regardless of the URL.
+function handleRequest(request, response) {
+ // Allow cross-origin, so you can XHR to it!
+ response.setHeader("Access-Control-Allow-Origin", "*", false);
+ // Avoid confusing cache behaviors
+ response.setHeader("Cache-Control", "no-cache", false);
+
+ // Write response body
+ response.write('registerAsyncCallback("action", async () => {});');
+}
diff --git a/toolkit/components/normandy/test/browser/addons/normandydriver-a-1.0/manifest.json b/toolkit/components/normandy/test/browser/addons/normandydriver-a-1.0/manifest.json
new file mode 100644
index 0000000000..fca9426a3f
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/addons/normandydriver-a-1.0/manifest.json
@@ -0,0 +1,11 @@
+{
+ "manifest_version": 2,
+ "name": "normandy_fixture_a",
+ "version": "1.0",
+ "description": "Dummy test fixture that's a webextension, branch A",
+ "browser_specific_settings": {
+ "gecko": {
+ "id": "normandydriver-a@example.com"
+ }
+ }
+}
diff --git a/toolkit/components/normandy/test/browser/addons/normandydriver-a-2.0/manifest.json b/toolkit/components/normandy/test/browser/addons/normandydriver-a-2.0/manifest.json
new file mode 100644
index 0000000000..40f7351425
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/addons/normandydriver-a-2.0/manifest.json
@@ -0,0 +1,11 @@
+{
+ "manifest_version": 2,
+ "name": "normandy_fixture_a",
+ "version": "2.0",
+ "description": "Dummy test fixture that's a webextension, branch A",
+ "browser_specific_settings": {
+ "gecko": {
+ "id": "normandydriver-a@example.com"
+ }
+ }
+}
diff --git a/toolkit/components/normandy/test/browser/addons/normandydriver-b-1.0/manifest.json b/toolkit/components/normandy/test/browser/addons/normandydriver-b-1.0/manifest.json
new file mode 100644
index 0000000000..044ae7ebc3
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/addons/normandydriver-b-1.0/manifest.json
@@ -0,0 +1,11 @@
+{
+ "manifest_version": 2,
+ "name": "normandy_fixture_b",
+ "version": "1.0",
+ "description": "Dummy test fixture that's a webextension, branch B",
+ "browser_specific_settings": {
+ "gecko": {
+ "id": "normandydriver-b@example.com"
+ }
+ }
+}
diff --git a/toolkit/components/normandy/test/browser/browser.ini b/toolkit/components/normandy/test/browser/browser.ini
new file mode 100644
index 0000000000..9236d38c1a
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser.ini
@@ -0,0 +1,49 @@
+[DEFAULT]
+tags = addons
+support-files =
+ action_server.sjs
+ addons/normandydriver-a-1.0.xpi
+ addons/normandydriver-b-1.0.xpi
+ addons/normandydriver-a-2.0.xpi
+generated-files =
+ addons/normandydriver-a-1.0.xpi
+ addons/normandydriver-b-1.0.xpi
+ addons/normandydriver-a-2.0.xpi
+head = head.js
+[browser_ActionsManager.js]
+[browser_AddonRollouts.js]
+[browser_AddonStudies.js]
+skip-if = (verify && (os == 'linux'))
+[browser_BaseAction.js]
+[browser_CleanupManager.js]
+[browser_ClientEnvironment.js]
+[browser_EventEmitter.js]
+[browser_Heartbeat.js]
+skip-if =
+ os == "win" && os_version == "6.1" # Skip on Azure - frequent failure
+[browser_LegacyHeartbeat.js]
+[browser_LogManager.js]
+[browser_Normandy.js]
+[browser_NormandyAddonManager.js]
+[browser_NormandyMigrations.js]
+[browser_PreferenceExperiments.js]
+[browser_PreferenceRollouts.js]
+[browser_RecipeRunner.js]
+tags = remote-settings
+[browser_ShieldPreferences.js]
+[browser_Storage.js]
+[browser_Uptake.js]
+[browser_about_preferences.js]
+[browser_about_studies.js]
+https_first_disabled = true
+[browser_actions_AddonRollbackAction.js]
+[browser_actions_AddonRolloutAction.js]
+[browser_actions_BranchedAddonStudyAction.js]
+[browser_actions_ConsoleLogAction.js]
+[browser_actions_MessagingExperimentAction.js]
+[browser_actions_PreferenceExperimentAction.js]
+skip-if =
+ os == "win" && os_version == "6.1" # Skip on Azure - frequent failure
+[browser_actions_PreferenceRollbackAction.js]
+[browser_actions_PreferenceRolloutAction.js]
+[browser_actions_ShowHeartbeatAction.js]
diff --git a/toolkit/components/normandy/test/browser/browser_ActionsManager.js b/toolkit/components/normandy/test/browser/browser_ActionsManager.js
new file mode 100644
index 0000000000..8b5772fa26
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_ActionsManager.js
@@ -0,0 +1,68 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { ActionsManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ActionsManager.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+const { ActionSchemas } = ChromeUtils.importESModule(
+ "resource://normandy/actions/schemas/index.sys.mjs"
+);
+
+// Test life cycle methods for actions
+decorate_task(async function () {
+ let manager = new ActionsManager();
+ const recipe = { id: 1, action: "test-local-action-used" };
+
+ let actionUsed = {
+ processRecipe: sinon.stub(),
+ finalize: sinon.stub(),
+ };
+ let actionUnused = {
+ processRecipe: sinon.stub(),
+ finalize: sinon.stub(),
+ };
+ manager.localActions = {
+ "test-local-action-used": actionUsed,
+ "test-local-action-unused": actionUnused,
+ };
+
+ await manager.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await manager.finalize();
+
+ Assert.deepEqual(
+ actionUsed.processRecipe.args,
+ [[recipe, BaseAction.suitability.FILTER_MATCH]],
+ "used action should be called with the recipe"
+ );
+ ok(
+ actionUsed.finalize.calledOnce,
+ "finalize should be called on used action"
+ );
+ Assert.deepEqual(
+ actionUnused.processRecipe.args,
+ [],
+ "unused action should not be called with the recipe"
+ );
+ ok(
+ actionUnused.finalize.calledOnce,
+ "finalize should be called on the unused action"
+ );
+});
+
+decorate_task(async function () {
+ for (const [name, Constructor] of Object.entries(
+ ActionsManager.actionConstructors
+ )) {
+ const action = new Constructor();
+ Assert.deepEqual(
+ ActionSchemas[name],
+ action.schema,
+ "action name should map to a schema"
+ );
+ }
+});
diff --git a/toolkit/components/normandy/test/browser/browser_AddonRollouts.js b/toolkit/components/normandy/test/browser/browser_AddonRollouts.js
new file mode 100644
index 0000000000..0826907b68
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_AddonRollouts.js
@@ -0,0 +1,141 @@
+"use strict";
+
+const { IndexedDB } = ChromeUtils.importESModule(
+ "resource://gre/modules/IndexedDB.sys.mjs"
+);
+
+const { AddonRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonRollouts.sys.mjs"
+);
+
+decorate_task(AddonRollouts.withTestMock(), async function testGetMissing() {
+ ok(
+ !(await AddonRollouts.get("does-not-exist")),
+ "get should return null when the requested rollout does not exist"
+ );
+});
+
+decorate_task(
+ AddonRollouts.withTestMock(),
+ async function testAddUpdateAndGet() {
+ const rollout = {
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extension: {},
+ };
+ await AddonRollouts.add(rollout);
+ let storedRollout = await AddonRollouts.get(rollout.slug);
+ Assert.deepEqual(
+ rollout,
+ storedRollout,
+ "get should retrieve a rollout from storage."
+ );
+
+ rollout.state = AddonRollouts.STATE_ROLLED_BACK;
+ await AddonRollouts.update(rollout);
+ storedRollout = await AddonRollouts.get(rollout.slug);
+ Assert.deepEqual(
+ rollout,
+ storedRollout,
+ "get should retrieve a rollout from storage."
+ );
+ }
+);
+
+decorate_task(
+ AddonRollouts.withTestMock(),
+ async function testCantUpdateNonexistent() {
+ const rollout = {
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensions: {},
+ };
+ await Assert.rejects(
+ AddonRollouts.update(rollout),
+ /doesn't already exist/,
+ "Update should fail if the rollout doesn't exist"
+ );
+ ok(
+ !(await AddonRollouts.has("test-rollout")),
+ "rollout should not have been added"
+ );
+ }
+);
+
+decorate_task(AddonRollouts.withTestMock(), async function testGetAll() {
+ const rollout1 = { slug: "test-rollout-1", extension: {} };
+ const rollout2 = { slug: "test-rollout-2", extension: {} };
+ await AddonRollouts.add(rollout1);
+ await AddonRollouts.add(rollout2);
+
+ const storedRollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ storedRollouts.sort((a, b) => a.id - b.id),
+ [rollout1, rollout2],
+ "getAll should return every stored rollout."
+ );
+});
+
+decorate_task(AddonRollouts.withTestMock(), async function testGetAllActive() {
+ const rollout1 = {
+ slug: "test-rollout-1",
+ state: AddonRollouts.STATE_ACTIVE,
+ };
+ const rollout3 = {
+ slug: "test-rollout-2",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ };
+ await AddonRollouts.add(rollout1);
+ await AddonRollouts.add(rollout3);
+
+ const activeRollouts = await AddonRollouts.getAllActive();
+ Assert.deepEqual(
+ activeRollouts,
+ [rollout1],
+ "getAllActive should return only active rollouts"
+ );
+});
+
+decorate_task(AddonRollouts.withTestMock(), async function testHas() {
+ const rollout = { slug: "test-rollout", extensions: {} };
+ await AddonRollouts.add(rollout);
+ ok(
+ await AddonRollouts.has(rollout.slug),
+ "has should return true for an existing rollout"
+ );
+ ok(
+ !(await AddonRollouts.has("does not exist")),
+ "has should return false for a missing rollout"
+ );
+});
+
+// init should mark active rollouts in telemetry
+decorate_task(
+ AddonRollouts.withTestMock(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ async function testInitTelemetry({ setExperimentActiveStub }) {
+ await AddonRollouts.add({
+ slug: "test-rollout-active-1",
+ state: AddonRollouts.STATE_ACTIVE,
+ });
+ await AddonRollouts.add({
+ slug: "test-rollout-active-2",
+ state: AddonRollouts.STATE_ACTIVE,
+ });
+ await AddonRollouts.add({
+ slug: "test-rollout-rolled-back",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ });
+
+ await AddonRollouts.init();
+
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [
+ ["test-rollout-active-1", "active", { type: "normandy-addonrollout" }],
+ ["test-rollout-active-2", "active", { type: "normandy-addonrollout" }],
+ ],
+ "init should set activate a telemetry experiment for active addons"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_AddonStudies.js b/toolkit/components/normandy/test/browser/browser_AddonStudies.js
new file mode 100644
index 0000000000..44417fef89
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_AddonStudies.js
@@ -0,0 +1,300 @@
+"use strict";
+
+const { IndexedDB } = ChromeUtils.importESModule(
+ "resource://gre/modules/IndexedDB.sys.mjs"
+);
+
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+const { addonStudyFactory, branchedAddonStudyFactory } =
+ NormandyTestUtils.factories;
+
+// Initialize test utils
+AddonTestUtils.initMochitest(this);
+
+decorate_task(AddonStudies.withStudies(), async function testGetMissing() {
+ ok(
+ !(await AddonStudies.get("does-not-exist")),
+ "get returns null when the requested study does not exist"
+ );
+});
+
+decorate_task(
+ AddonStudies.withStudies([addonStudyFactory({ slug: "test-study" })]),
+ async function testGet({ addonStudies: [study] }) {
+ const storedStudy = await AddonStudies.get(study.recipeId);
+ Assert.deepEqual(study, storedStudy, "get retrieved a study from storage.");
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([addonStudyFactory(), addonStudyFactory()]),
+ async function testGetAll({ addonStudies }) {
+ const storedStudies = await AddonStudies.getAll();
+ Assert.deepEqual(
+ new Set(storedStudies),
+ new Set(addonStudies),
+ "getAll returns every stored study."
+ );
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([addonStudyFactory({ slug: "test-study" })]),
+ async function testHas({ addonStudies: [study] }) {
+ let hasStudy = await AddonStudies.has(study.recipeId);
+ ok(hasStudy, "has returns true for a study that exists in storage.");
+
+ hasStudy = await AddonStudies.has("does-not-exist");
+ ok(
+ !hasStudy,
+ "has returns false for a study that doesn't exist in storage."
+ );
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({ slug: "test-study1" }),
+ addonStudyFactory({ slug: "test-study2" }),
+ ]),
+ async function testClear({ addonStudies: [study1, study2] }) {
+ const hasAll =
+ (await AddonStudies.has(study1.recipeId)) &&
+ (await AddonStudies.has(study2.recipeId));
+ ok(hasAll, "Before calling clear, both studies are in storage.");
+
+ await AddonStudies.clear();
+ const hasAny =
+ (await AddonStudies.has(study1.recipeId)) ||
+ (await AddonStudies.has(study2.recipeId));
+ ok(!hasAny, "After calling clear, all studies are removed from storage.");
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([addonStudyFactory({ slug: "foo" })]),
+ async function testUpdate({ addonStudies: [study] }) {
+ Assert.deepEqual(await AddonStudies.get(study.recipeId), study);
+
+ const updatedStudy = {
+ ...study,
+ slug: "bar",
+ };
+ await AddonStudies.update(updatedStudy);
+
+ Assert.deepEqual(await AddonStudies.get(study.recipeId), updatedStudy);
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ active: true,
+ addonId: "does.not.exist@example.com",
+ studyEndDate: null,
+ }),
+ addonStudyFactory({ active: true, addonId: "installed@example.com" }),
+ addonStudyFactory({
+ active: false,
+ addonId: "already.gone@example.com",
+ studyEndDate: new Date(2012, 1),
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtension(
+ { id: "installed@example.com" },
+ { expectUninstall: true }
+ ),
+ async function testInit({
+ addonStudies: [activeUninstalledStudy, activeInstalledStudy, inactiveStudy],
+ sendEventSpy,
+ installedWebExtension: { addonId },
+ }) {
+ await AddonStudies.init();
+
+ const newActiveStudy = await AddonStudies.get(
+ activeUninstalledStudy.recipeId
+ );
+ ok(
+ !newActiveStudy.active,
+ "init marks studies as inactive if their add-on is not installed."
+ );
+ ok(
+ newActiveStudy.studyEndDate,
+ "init sets the study end date if a study's add-on is not installed."
+ );
+ let events = Services.telemetry.snapshotEvents(
+ Ci.nsITelemetry.DATASET_PRERELEASE_CHANNELS,
+ false
+ );
+ events = (events.parent || []).filter(e => e[1] == "normandy");
+ Assert.deepEqual(
+ events[0].slice(2), // strip timestamp and "normandy"
+ [
+ "unenroll",
+ "addon_study",
+ activeUninstalledStudy.slug,
+ {
+ addonId: activeUninstalledStudy.addonId,
+ addonVersion: activeUninstalledStudy.addonVersion,
+ reason: "uninstalled-sideload",
+ branch: AddonStudies.NO_BRANCHES_MARKER,
+ enrollmentId: events[0][5].enrollmentId,
+ },
+ ],
+ "AddonStudies.init() should send the correct telemetry event"
+ );
+ ok(
+ NormandyTestUtils.isUuid(events[0][5].enrollmentId),
+ "enrollment ID should be a UUID"
+ );
+
+ const newInactiveStudy = await AddonStudies.get(inactiveStudy.recipeId);
+ is(
+ newInactiveStudy.studyEndDate.getFullYear(),
+ 2012,
+ "init does not modify inactive studies."
+ );
+
+ const newActiveInstalledStudy = await AddonStudies.get(
+ activeInstalledStudy.recipeId
+ );
+ Assert.deepEqual(
+ activeInstalledStudy,
+ newActiveInstalledStudy,
+ "init does not modify studies whose add-on is still installed."
+ );
+
+ // Only activeUninstalledStudy should have generated any events
+ ok(sendEventSpy.calledOnce, "no extra events should be generated");
+
+ // Clean up
+ const addon = await AddonManager.getAddonByID(addonId);
+ await addon.uninstall();
+ await TestUtils.topicObserved("shield-study-ended", (subject, message) => {
+ return message === `${activeInstalledStudy.recipeId}`;
+ });
+ }
+);
+
+// init should register telemetry experiments
+decorate_task(
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ active: true,
+ addonId: "installed1@example.com",
+ }),
+ branchedAddonStudyFactory({
+ active: true,
+ addonId: "installed2@example.com",
+ }),
+ ]),
+ withInstalledWebExtensionSafe({ id: "installed1@example.com" }),
+ withInstalledWebExtension({ id: "installed2@example.com" }),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ async function testInit({ addonStudies, setExperimentActiveStub }) {
+ await AddonStudies.init();
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [
+ [
+ addonStudies[0].slug,
+ addonStudies[0].branch,
+ {
+ type: "normandy-addonstudy",
+ enrollmentId: addonStudies[0].enrollmentId,
+ },
+ ],
+ [
+ addonStudies[1].slug,
+ addonStudies[1].branch,
+ {
+ type: "normandy-addonstudy",
+ enrollmentId: addonStudies[1].enrollmentId,
+ },
+ ],
+ ],
+ "Add-on studies are registered in Telemetry by AddonStudies.init"
+ );
+ }
+);
+
+// Test that AddonStudies.init() ends studies that have been uninstalled
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ active: true,
+ addonId: "installed@example.com",
+ studyEndDate: null,
+ }),
+ ]),
+ withInstalledWebExtension(
+ { id: "installed@example.com" },
+ { expectUninstall: true }
+ ),
+ async function testInit({
+ addonStudies: [study],
+ installedWebExtension: { addonId },
+ }) {
+ const addon = await AddonManager.getAddonByID(addonId);
+ await addon.uninstall();
+ await TestUtils.topicObserved("shield-study-ended", (subject, message) => {
+ return message === `${study.recipeId}`;
+ });
+
+ const newStudy = await AddonStudies.get(study.recipeId);
+ ok(
+ !newStudy.active,
+ "Studies are marked as inactive when their add-on is uninstalled."
+ );
+ ok(
+ newStudy.studyEndDate,
+ "The study end date is set when the add-on for the study is uninstalled."
+ );
+ }
+);
+
+decorate_task(
+ AddonStudies.withStudies([
+ NormandyTestUtils.factories.addonStudyFactory({ active: true }),
+ NormandyTestUtils.factories.branchedAddonStudyFactory(),
+ ]),
+ async function testRemoveOldAddonStudies({
+ addonStudies: [noBranchStudy, branchedStudy],
+ }) {
+ // pre check, both studies are active
+ const preActiveIds = (await AddonStudies.getAllActive()).map(
+ addon => addon.recipeId
+ );
+ Assert.deepEqual(
+ preActiveIds,
+ [noBranchStudy.recipeId, branchedStudy.recipeId],
+ "Both studies should be active"
+ );
+
+ // run the migration
+ await AddonStudies.migrations.migration02RemoveOldAddonStudyAction();
+
+    // The unbranched study should end
+ const postActiveIds = (await AddonStudies.getAllActive()).map(
+ addon => addon.recipeId
+ );
+ Assert.deepEqual(
+ postActiveIds,
+ [branchedStudy.recipeId],
+ "The unbranched study should end"
+ );
+
+ // But both studies should still be present
+ const postAllIds = (await AddonStudies.getAll()).map(
+ addon => addon.recipeId
+ );
+ Assert.deepEqual(
+ postAllIds,
+ [noBranchStudy.recipeId, branchedStudy.recipeId],
+ "Both studies should still be present"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_BaseAction.js b/toolkit/components/normandy/test/browser/browser_BaseAction.js
new file mode 100644
index 0000000000..0de9ce2405
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_BaseAction.js
@@ -0,0 +1,349 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+
+class NoopAction extends BaseAction {
+ constructor() {
+ super();
+ this._testPreExecutionFlag = false;
+ this._testRunFlag = false;
+ this._testFinalizeFlag = false;
+ }
+
+ _preExecution() {
+ this._testPreExecutionFlag = true;
+ }
+
+ _run(recipe) {
+ this._testRunFlag = true;
+ }
+
+ _finalize() {
+ this._testFinalizeFlag = true;
+ }
+}
+
+NoopAction._errorToThrow = new Error("test error");
+
+class FailPreExecutionAction extends NoopAction {
+ _preExecution() {
+ throw NoopAction._errorToThrow;
+ }
+}
+
+class FailRunAction extends NoopAction {
+ _run(recipe) {
+ throw NoopAction._errorToThrow;
+ }
+}
+
+class FailFinalizeAction extends NoopAction {
+ _finalize() {
+ throw NoopAction._errorToThrow;
+ }
+}
+
+// Test that constructor and override methods are run
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ async () => {
+ let action = new NoopAction();
+ is(
+ action._testPreExecutionFlag,
+ false,
+ "_preExecution should not have been called on a new action"
+ );
+ is(
+ action._testRunFlag,
+ false,
+ "_run has should not have been called on a new action"
+ );
+ is(
+ action._testFinalizeFlag,
+ false,
+ "_finalize should not be called on a new action"
+ );
+
+ const recipe = recipeFactory();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(
+ action._testPreExecutionFlag,
+ true,
+ "_preExecution should be called when a recipe is executed"
+ );
+ is(
+ action._testRunFlag,
+ true,
+ "_run should be called when a recipe is executed"
+ );
+ is(
+ action._testFinalizeFlag,
+ false,
+ "_finalize should not have been called when a recipe is executed"
+ );
+
+ await action.finalize();
+ is(
+ action._testFinalizeFlag,
+ true,
+ "_finalizeExecution should be called when finalize was called"
+ );
+
+ action = new NoopAction();
+ await action.finalize();
+ is(
+ action._testPreExecutionFlag,
+ true,
+ "_preExecution should be called when finalized even if no recipes"
+ );
+ is(
+ action._testRunFlag,
+ false,
+ "_run should be called if no recipes were run"
+ );
+ is(
+ action._testFinalizeFlag,
+ true,
+ "_finalize should be called when finalized"
+ );
+ }
+);
+
+// Test that per-recipe uptake telemetry is recorded
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ async function ({ reportRecipeStub }) {
+ const action = new NoopAction();
+ const recipe = recipeFactory();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_SUCCESS]],
+ "per-recipe uptake telemetry should be reported"
+ );
+ }
+);
+
+// Finalize causes action telemetry to be recorded
+decorate_task(
+ withStub(Uptake, "reportAction"),
+ async function ({ reportActionStub }) {
+ const action = new NoopAction();
+ await action.finalize();
+ ok(
+ action.state == NoopAction.STATE_FINALIZED,
+ "Action should be marked as finalized"
+ );
+ Assert.deepEqual(
+ reportActionStub.args,
+ [[action.name, Uptake.ACTION_SUCCESS]],
+ "action uptake telemetry should be reported"
+ );
+ }
+);
+
+// Recipes can't be run after finalize is called
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ async function ({ reportRecipeStub }) {
+ const action = new NoopAction();
+ const recipe1 = recipeFactory();
+ const recipe2 = recipeFactory();
+
+ await action.processRecipe(recipe1, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ await Assert.rejects(
+ action.processRecipe(recipe2, BaseAction.suitability.FILTER_MATCH),
+ /^Error: Action has already been finalized$/,
+ "running recipes after finalization is an error"
+ );
+
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe1, Uptake.RECIPE_SUCCESS]],
+ "Only recipes executed prior to finalizer should report uptake telemetry"
+ );
+ }
+);
+
+// Test an action with a failing pre-execution step
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ async function ({ reportRecipeStub, reportActionStub }) {
+ const recipe = recipeFactory();
+ const action = new FailPreExecutionAction();
+ is(
+ action.state,
+ FailPreExecutionAction.STATE_PREPARING,
+ "Pre-execution should not happen immediately"
+ );
+
+ // Should fail, putting the action into a "failed" state, but the entry
+ // point `processRecipe` should not itself throw an exception.
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(
+ action.state,
+ FailPreExecutionAction.STATE_FAILED,
+ "Action fails if pre-execution fails"
+ );
+ is(
+ action.lastError,
+ NoopAction._errorToThrow,
+ "The thrown error should be stored in lastError"
+ );
+
+ // Should not throw, even though the action is in a disabled state.
+ await action.finalize();
+ is(
+ action.state,
+ FailPreExecutionAction.STATE_FINALIZED,
+ "Action should be finalized"
+ );
+ is(
+ action.lastError,
+ NoopAction._errorToThrow,
+ "lastError should not have changed"
+ );
+
+ is(action._testRunFlag, false, "_run should not have been called");
+ is(
+ action._testFinalizeFlag,
+ false,
+ "_finalize should not have been called"
+ );
+
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_ACTION_DISABLED]],
+ "Recipe should report recipe status as action disabled"
+ );
+
+ Assert.deepEqual(
+ reportActionStub.args,
+ [[action.name, Uptake.ACTION_PRE_EXECUTION_ERROR]],
+ "Action should report pre execution error"
+ );
+ }
+);
+
+// Test an action with a failing recipe step
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ async function ({ reportRecipeStub, reportActionStub }) {
+ const recipe = recipeFactory();
+ const action = new FailRunAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(
+ action.state,
+ FailRunAction.STATE_READY,
+ "Action should not be marked as failed due to a recipe failure"
+ );
+ await action.finalize();
+ is(
+ action.state,
+ FailRunAction.STATE_FINALIZED,
+ "Action should be marked as finalized after finalize is called"
+ );
+
+ ok(action._testFinalizeFlag, "_finalize should have been called");
+
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_EXECUTION_ERROR]],
+ "Recipe should report recipe execution error"
+ );
+
+ Assert.deepEqual(
+ reportActionStub.args,
+ [[action.name, Uptake.ACTION_SUCCESS]],
+ "Action should report success"
+ );
+ }
+);
+
+// Test an action with a failing finalize step
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ async function ({ reportRecipeStub, reportActionStub }) {
+ const recipe = recipeFactory();
+ const action = new FailFinalizeAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_SUCCESS]],
+ "Recipe should report success"
+ );
+
+ Assert.deepEqual(
+ reportActionStub.args,
+ [[action.name, Uptake.ACTION_POST_EXECUTION_ERROR]],
+ "Action should report post execution error"
+ );
+ }
+);
+
+// Disable disables an action
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ async function ({ reportRecipeStub, reportActionStub }) {
+ const recipe = recipeFactory();
+ const action = new NoopAction();
+
+ action.disable();
+ is(
+ action.state,
+ NoopAction.STATE_DISABLED,
+ "Action should be marked as disabled"
+ );
+
+ // Should not throw, even though the action is disabled
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+
+ // Should not throw, even though the action is disabled
+ await action.finalize();
+
+ is(action._testRunFlag, false, "_run should not have been called");
+ is(
+ action._testFinalizeFlag,
+ false,
+ "_finalize should not have been called"
+ );
+
+ Assert.deepEqual(
+ reportActionStub.args,
+ [[action.name, Uptake.ACTION_SUCCESS]],
+ "Action should not report pre execution error"
+ );
+
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_ACTION_DISABLED]],
+ "Recipe should report recipe status as action disabled"
+ );
+ }
+);
+
+// If the capabilities don't match, processRecipe shouldn't validate the arguments
+decorate_task(async function () {
+ const recipe = recipeFactory();
+ const action = new NoopAction();
+ const verifySpy = sinon.spy(action, "validateArguments");
+ await action.processRecipe(
+ recipe,
+ BaseAction.suitability.CAPABILITIES_MISMATCH
+ );
+ ok(!verifySpy.called, "validateArguments should not be called");
+});
diff --git a/toolkit/components/normandy/test/browser/browser_CleanupManager.js b/toolkit/components/normandy/test/browser/browser_CleanupManager.js
new file mode 100644
index 0000000000..f1b4930394
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_CleanupManager.js
@@ -0,0 +1,26 @@
+"use strict";
+
+const { CleanupManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/CleanupManager.sys.mjs"
+); /* global CleanupManagerClass */
+
+add_task(async function testCleanupManager() {
+ const spy1 = sinon.spy();
+ const spy2 = sinon.spy();
+ const spy3 = sinon.spy();
+
+ const manager = new CleanupManager.constructor();
+ manager.addCleanupHandler(spy1);
+ manager.addCleanupHandler(spy2);
+ manager.addCleanupHandler(spy3);
+ manager.removeCleanupHandler(spy2); // Test removal
+
+ await manager.cleanup();
+ ok(spy1.called, "cleanup called the spy1 handler");
+ ok(!spy2.called, "cleanup did not call the spy2 handler");
+ ok(spy3.called, "cleanup called the spy3 handler");
+
+ await manager.cleanup();
+ ok(spy1.calledOnce, "cleanup only called the spy1 handler once");
+ ok(spy3.calledOnce, "cleanup only called the spy3 handler once");
+});
diff --git a/toolkit/components/normandy/test/browser/browser_ClientEnvironment.js b/toolkit/components/normandy/test/browser/browser_ClientEnvironment.js
new file mode 100644
index 0000000000..1b6d2c5ff9
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_ClientEnvironment.js
@@ -0,0 +1,274 @@
+"use strict";
+
+const { TelemetryController } = ChromeUtils.importESModule(
+ "resource://gre/modules/TelemetryController.sys.mjs"
+);
+
+const { AddonRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonRollouts.sys.mjs"
+);
+const { ClientEnvironment } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ClientEnvironment.sys.mjs"
+);
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { PreferenceRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceRollouts.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+add_task(async function testTelemetry() {
+ // setup
+ await SpecialPowers.pushPrefEnv({
+ set: [["privacy.reduceTimerPrecision", true]],
+ });
+
+ await TelemetryController.submitExternalPing("testfoo", { foo: 1 });
+ await TelemetryController.submitExternalPing("testbar", { bar: 2 });
+ await TelemetryController.submitExternalPing("testfoo", { foo: 3 });
+
+ // Test it can access telemetry
+ const telemetry = await ClientEnvironment.telemetry;
+  is(typeof telemetry, "object", "Telemetry is accessible");
+
+ // Test it reads different types of telemetry
+ is(
+ telemetry.testfoo.payload.foo,
+ 3,
+ "telemetry filters pull the latest ping from a type"
+ );
+ is(
+ telemetry.testbar.payload.bar,
+ 2,
+ "telemetry filters pull from submitted telemetry pings"
+ );
+});
+
+add_task(async function testUserId() {
+ // Test that userId is available
+ ok(NormandyTestUtils.isUuid(ClientEnvironment.userId), "userId available");
+
+ // test that it pulls from the right preference
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.user_id", "fake id"]],
+ });
+ is(ClientEnvironment.userId, "fake id", "userId is pulled from preferences");
+});
+
+add_task(async function testDistribution() {
+ // distribution id defaults to "default" for most builds, and
+ // "mozilla-MSIX" for MSIX builds.
+ is(
+ ClientEnvironment.distribution,
+ AppConstants.platform === "win" &&
+ Services.sysinfo.getProperty("hasWinPackageId")
+ ? "mozilla-MSIX"
+ : "default",
+ "distribution has a default value"
+ );
+
+ // distribution id is read from a preference
+ Services.prefs
+ .getDefaultBranch(null)
+ .setStringPref("distribution.id", "funnelcake");
+ is(
+ ClientEnvironment.distribution,
+ "funnelcake",
+ "distribution is read from preferences"
+ );
+ Services.prefs
+ .getDefaultBranch(null)
+ .setStringPref("distribution.id", "default");
+});
+
+const mockClassify = { country: "FR", request_time: new Date(2017, 1, 1) };
+add_task(
+ ClientEnvironment.withMockClassify(
+ mockClassify,
+ async function testCountryRequestTime() {
+ // Test that country and request_time pull their data from the server.
+ is(
+ await ClientEnvironment.country,
+ mockClassify.country,
+ "country is read from the server API"
+ );
+ is(
+ await ClientEnvironment.request_time,
+ mockClassify.request_time,
+ "request_time is read from the server API"
+ );
+ }
+ )
+);
+
+add_task(async function testSync() {
+ is(
+ ClientEnvironment.syncMobileDevices,
+ 0,
+ "syncMobileDevices defaults to zero"
+ );
+ is(
+ ClientEnvironment.syncDesktopDevices,
+ 0,
+ "syncDesktopDevices defaults to zero"
+ );
+ is(
+ ClientEnvironment.syncTotalDevices,
+ 0,
+ "syncTotalDevices defaults to zero"
+ );
+ await SpecialPowers.pushPrefEnv({
+ set: [
+ ["services.sync.clients.devices.mobile", 5],
+ ["services.sync.clients.devices.desktop", 4],
+ ],
+ });
+ is(
+ ClientEnvironment.syncMobileDevices,
+ 5,
+ "syncMobileDevices is read when set"
+ );
+ is(
+ ClientEnvironment.syncDesktopDevices,
+ 4,
+ "syncDesktopDevices is read when set"
+ );
+ is(
+ ClientEnvironment.syncTotalDevices,
+ 9,
+ "syncTotalDevices is read when set"
+ );
+});
+
+add_task(async function testDoNotTrack() {
+ // doNotTrack defaults to false
+ ok(!ClientEnvironment.doNotTrack, "doNotTrack has a default value");
+
+ // doNotTrack is read from a preference
+ await SpecialPowers.pushPrefEnv({
+ set: [["privacy.donottrackheader.enabled", true]],
+ });
+ ok(ClientEnvironment.doNotTrack, "doNotTrack is read from preferences");
+});
+
+add_task(async function testExperiments() {
+ const active = { slug: "active", expired: false };
+ const expired = { slug: "expired", expired: true };
+ const getAll = sinon
+ .stub(PreferenceExperiments, "getAll")
+ .callsFake(async () => [active, expired]);
+
+ const experiments = await ClientEnvironment.experiments;
+ Assert.deepEqual(
+ experiments.all,
+ ["active", "expired"],
+ "experiments.all returns all stored experiment names"
+ );
+ Assert.deepEqual(
+ experiments.active,
+ ["active"],
+ "experiments.active returns all active experiment names"
+ );
+ Assert.deepEqual(
+ experiments.expired,
+ ["expired"],
+ "experiments.expired returns all expired experiment names"
+ );
+
+ getAll.restore();
+});
+
+add_task(async function isFirstRun() {
+ await SpecialPowers.pushPrefEnv({ set: [["app.normandy.first_run", true]] });
+ ok(ClientEnvironment.isFirstRun, "isFirstRun is read from preferences");
+});
+
+decorate_task(
+ PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ branch: "a-test-branch",
+ }),
+ ]),
+ AddonStudies.withStudies([
+ NormandyTestUtils.factories.branchedAddonStudyFactory({
+ branch: "b-test-branch",
+ }),
+ ]),
+ async function testStudies({
+ prefExperiments: [prefExperiment],
+ addonStudies: [addonStudy],
+ }) {
+ Assert.deepEqual(
+ await ClientEnvironment.studies,
+ {
+ pref: {
+ [prefExperiment.slug]: prefExperiment,
+ },
+ addon: {
+ [addonStudy.slug]: addonStudy,
+ },
+ },
+ "addon and preference studies shold be accessible"
+ );
+ is(
+ (await ClientEnvironment.studies).pref[prefExperiment.slug].branch,
+ "a-test-branch",
+ "A specific preference experiment field should be accessible in the context"
+ );
+ is(
+ (await ClientEnvironment.studies).addon[addonStudy.slug].branch,
+ "b-test-branch",
+ "A specific addon study field should be accessible in the context"
+ );
+
+ ok(RecipeRunner.getCapabilities().has("jexl.context.normandy.studies"));
+ ok(RecipeRunner.getCapabilities().has("jexl.context.env.studies"));
+ }
+);
+
+decorate_task(PreferenceRollouts.withTestMock(), async function testRollouts() {
+ const prefRollout = {
+ slug: "test-rollout",
+ preference: [],
+ enrollmentId: "test-enrollment-id-1",
+ };
+ await PreferenceRollouts.add(prefRollout);
+ const addonRollout = {
+ slug: "test-rollout-1",
+ extension: {},
+ enrollmentId: "test-enrollment-id-2",
+ };
+ await AddonRollouts.add(addonRollout);
+
+ Assert.deepEqual(
+ await ClientEnvironment.rollouts,
+ {
+ pref: {
+ [prefRollout.slug]: prefRollout,
+ },
+ addon: {
+ [addonRollout.slug]: addonRollout,
+ },
+ },
+ "addon and preference rollouts should be accessible"
+ );
+ is(
+ (await ClientEnvironment.rollouts).pref[prefRollout.slug].enrollmentId,
+ "test-enrollment-id-1",
+ "A specific preference rollout field should be accessible in the context"
+ );
+ is(
+ (await ClientEnvironment.rollouts).addon[addonRollout.slug].enrollmentId,
+ "test-enrollment-id-2",
+ "A specific addon rollout field should be accessible in the context"
+ );
+
+ ok(RecipeRunner.getCapabilities().has("jexl.context.normandy.rollouts"));
+ ok(RecipeRunner.getCapabilities().has("jexl.context.env.rollouts"));
+});
diff --git a/toolkit/components/normandy/test/browser/browser_EventEmitter.js b/toolkit/components/normandy/test/browser/browser_EventEmitter.js
new file mode 100644
index 0000000000..a64c52896f
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_EventEmitter.js
@@ -0,0 +1,110 @@
+"use strict";
+
+const { EventEmitter } = ChromeUtils.importESModule(
+ "resource://normandy/lib/EventEmitter.sys.mjs"
+);
+
+const evidence = {
+ a: 0,
+ b: 0,
+ c: 0,
+ log: "",
+};
+
+function listenerA(x) {
+ evidence.a += x;
+ evidence.log += "a";
+}
+
+function listenerB(x) {
+ evidence.b += x;
+ evidence.log += "b";
+}
+
+function listenerC(x) {
+ evidence.c += x;
+ evidence.log += "c";
+}
+
+decorate_task(async function () {
+ const eventEmitter = new EventEmitter();
+
+ // Fire an unrelated event, to make sure nothing goes wrong
+ eventEmitter.on("nothing");
+
+ // bind listeners
+ eventEmitter.on("event", listenerA);
+ eventEmitter.on("event", listenerB);
+ eventEmitter.once("event", listenerC);
+
+ // one event for all listeners
+ eventEmitter.emit("event", 1);
+ // another event for a and b, since c should have turned off already
+ eventEmitter.emit("event", 10);
+
+ // make sure events haven't actually fired yet, just queued
+ Assert.deepEqual(
+ evidence,
+ {
+ a: 0,
+ b: 0,
+ c: 0,
+ log: "",
+ },
+ "events are fired async"
+ );
+
+ // Spin the event loop to run events, so we can safely "off"
+ await Promise.resolve();
+
+ // Check intermediate event results
+ Assert.deepEqual(
+ evidence,
+ {
+ a: 11,
+ b: 11,
+ c: 1,
+ log: "abcab",
+ },
+ "intermediate events are fired"
+ );
+
+ // one more event for a
+ eventEmitter.off("event", listenerB);
+ eventEmitter.emit("event", 100);
+
+ // And another unrelated event
+ eventEmitter.on("nothing");
+
+ // Spin the event loop to run events
+ await Promise.resolve();
+
+ Assert.deepEqual(
+ evidence,
+ {
+ a: 111,
+ b: 11,
+ c: 1,
+ log: "abcaba", // events are in order
+ },
+ "events fired as expected"
+ );
+
+ // Test that mutating the data passed to the event doesn't actually
+ // mutate it for other events.
+ let handlerRunCount = 0;
+ const mutationHandler = data => {
+ handlerRunCount++;
+ data.count++;
+ is(data.count, 1, "Event data is not mutated between handlers.");
+ };
+ eventEmitter.on("mutationTest", mutationHandler);
+ eventEmitter.on("mutationTest", mutationHandler);
+
+ const data = { count: 0 };
+ eventEmitter.emit("mutationTest", data);
+ await Promise.resolve();
+
+ is(handlerRunCount, 2, "Mutation handler was executed twice.");
+ is(data.count, 0, "Event data cannot be mutated by handlers.");
+});
diff --git a/toolkit/components/normandy/test/browser/browser_Heartbeat.js b/toolkit/components/normandy/test/browser/browser_Heartbeat.js
new file mode 100644
index 0000000000..0166c4d7b0
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_Heartbeat.js
@@ -0,0 +1,262 @@
+"use strict";
+
+const { Heartbeat } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Heartbeat.sys.mjs"
+);
+
+/**
+ * Assert an array is in non-descending order, and that every element is a number
+ */
+function assertOrdered(arr) {
+ for (let i = 0; i < arr.length; i++) {
+ Assert.equal(typeof arr[i], "number", `element ${i} is type "number"`);
+ }
+ for (let i = 0; i < arr.length - 1; i++) {
+ Assert.lessOrEqual(
+ arr[i],
+ arr[i + 1],
+ `element ${i} is less than or equal to element ${i + 1}`
+ );
+ }
+}
+
+/* Close every notification in a target window and notification box */
+function closeAllNotifications(targetWindow, notificationBox) {
+ if (notificationBox.allNotifications.length === 0) {
+ return Promise.resolve();
+ }
+
+ return new Promise(resolve => {
+ const notificationSet = new Set(notificationBox.allNotifications);
+
+ const observer = new targetWindow.MutationObserver(mutations => {
+ for (const mutation of mutations) {
+ for (let i = 0; i < mutation.removedNodes.length; i++) {
+ const node = mutation.removedNodes.item(i);
+ if (notificationSet.has(node)) {
+ notificationSet.delete(node);
+ }
+ }
+ }
+ if (notificationSet.size === 0) {
+ Assert.equal(
+ notificationBox.allNotifications.length,
+ 0,
+ "No notifications left"
+ );
+ observer.disconnect();
+ resolve();
+ }
+ });
+
+ observer.observe(notificationBox.stack, { childList: true });
+
+ for (const notification of notificationBox.allNotifications) {
+ notification.close();
+ }
+ });
+}
+
+/* Check that the correct telemetry was sent */
+function assertTelemetrySent(hb, eventNames) {
+ return new Promise(resolve => {
+ hb.eventEmitter.once("TelemetrySent", payload => {
+ const events = [0];
+ for (const name of eventNames) {
+ Assert.equal(
+ typeof payload[name],
+ "number",
+ `payload field ${name} is a number`
+ );
+ events.push(payload[name]);
+ }
+ events.push(Date.now());
+
+ assertOrdered(events);
+ resolve();
+ });
+ });
+}
+
+function getStars(notice) {
+ return notice.buttonContainer.querySelectorAll(".star-x");
+}
+
+add_setup(async function () {
+ let win = await BrowserTestUtils.openNewBrowserWindow();
+ // Open a new tab to keep the window open.
+ await BrowserTestUtils.openNewForegroundTab(
+ win.gBrowser,
+ "https://example.com"
+ );
+});
+
+// Several of the behaviors of the heartbeat prompt are mutually exclusive, so checks are broken up
+// into three batches.
+
+/* Batch #1 - General UI, Stars, and telemetry data */
+add_task(async function () {
+ const targetWindow = Services.wm.getMostRecentWindow("navigator:browser");
+ const notificationBox = targetWindow.gNotificationBox;
+
+ const preCount = notificationBox.allNotifications.length;
+ const hb = new Heartbeat(targetWindow, {
+ testing: true,
+ flowId: "test",
+ message: "test",
+ engagementButtonLabel: undefined,
+ learnMoreMessage: "Learn More",
+ learnMoreUrl: "https://example.org/learnmore",
+ });
+
+ // Check UI
+ const learnMoreEl = hb.notice.messageText.querySelector(".text-link");
+ Assert.equal(
+ notificationBox.allNotifications.length,
+ preCount + 1,
+ "Correct number of notifications open"
+ );
+ Assert.equal(getStars(hb.notice).length, 5, "Correct number of stars");
+ Assert.equal(
+ hb.notice.buttonContainer.querySelectorAll(".notification-button").length,
+ 0,
+ "Engagement button not shown"
+ );
+ Assert.equal(
+ learnMoreEl.href,
+ "https://example.org/learnmore",
+ "Learn more url correct"
+ );
+ Assert.equal(learnMoreEl.value, "Learn More", "Learn more label correct");
+ // There's a space included before the learn more link in proton.
+ Assert.equal(
+ hb.notice.messageText.textContent,
+ "test ",
+ "Message is correct"
+ );
+
+ // Check that when clicking the learn more link, a tab opens with the right URL
+ let loadedPromise;
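+  // Capture the new tab's load promise as soon as it opens so the navigation is not missed.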
+ const tabOpenPromise = new Promise(resolve => {
+ targetWindow.gBrowser.tabContainer.addEventListener(
+ "TabOpen",
+ event => {
+ let tab = event.target;
+ loadedPromise = BrowserTestUtils.browserLoaded(
+ tab.linkedBrowser,
+ true,
+ url => url && url !== "about:blank"
+ );
+ resolve(tab);
+ },
+ { once: true }
+ );
+ });
+ learnMoreEl.click();
+ const tab = await tabOpenPromise;
+ const tabUrl = await loadedPromise;
+
+ Assert.equal(
+ tabUrl,
+ "https://example.org/learnmore",
+ "Learn more link opened the right url"
+ );
+
+ const telemetrySentPromise = assertTelemetrySent(hb, [
+ "offeredTS",
+ "learnMoreTS",
+ "closedTS",
+ ]);
+ // Close notification to trigger telemetry to be sent
+ await closeAllNotifications(targetWindow, notificationBox);
+ await telemetrySentPromise;
+ BrowserTestUtils.removeTab(tab);
+});
+
+// Batch #2 - Engagement buttons
+add_task(async function () {
+ const targetWindow = Services.wm.getMostRecentWindow("navigator:browser");
+ const notificationBox = targetWindow.gNotificationBox;
+ const hb = new Heartbeat(targetWindow, {
+ testing: true,
+ flowId: "test",
+ message: "test",
+ engagementButtonLabel: "Click me!",
+ postAnswerUrl: "https://example.org/postAnswer",
+ learnMoreMessage: "Learn More",
+ learnMoreUrl: "https://example.org/learnMore",
+ });
+ const engagementButton = hb.notice.buttonContainer.querySelector(
+ ".notification-button"
+ );
+
+ Assert.equal(getStars(hb.notice).length, 0, "Stars not shown");
+ Assert.ok(engagementButton, "Engagement button added");
+ Assert.equal(
+ engagementButton.label,
+ "Click me!",
+ "Engagement button has correct label"
+ );
+
+ const engagementEl = hb.notice.buttonContainer.querySelector(
+ ".notification-button"
+ );
+ let loadedPromise;
+ const tabOpenPromise = new Promise(resolve => {
+ targetWindow.gBrowser.tabContainer.addEventListener(
+ "TabOpen",
+ event => {
+ let tab = event.target;
+ loadedPromise = BrowserTestUtils.browserLoaded(
+ tab.linkedBrowser,
+ true,
+ url => url && url !== "about:blank"
+ );
+ resolve(tab);
+ },
+ { once: true }
+ );
+ });
+ engagementEl.click();
+ const tab = await tabOpenPromise;
+ const tabUrl = await loadedPromise;
+  // The postAnswer URL gets query parameters appended to it, so check the prefix with startsWith instead of Assert.equal
+ Assert.ok(
+ tabUrl.startsWith("https://example.org/postAnswer"),
+ "Engagement button opened the right url"
+ );
+
+ const telemetrySentPromise = assertTelemetrySent(hb, [
+ "offeredTS",
+ "engagedTS",
+ "closedTS",
+ ]);
+ // Close notification to trigger telemetry to be sent
+ await closeAllNotifications(targetWindow, notificationBox);
+ await telemetrySentPromise;
+ BrowserTestUtils.removeTab(tab);
+});
+
+// Batch 3 - Closing the window while heartbeat is open
+add_task(async function () {
+ const targetWindow = await BrowserTestUtils.openNewBrowserWindow();
+
+ const hb = new Heartbeat(targetWindow, {
+ testing: true,
+ flowId: "test",
+ message: "test",
+ });
+
+ const telemetrySentPromise = assertTelemetrySent(hb, [
+ "offeredTS",
+ "windowClosedTS",
+ ]);
+ // triggers sending ping to normandy
+ await BrowserTestUtils.closeWindow(targetWindow);
+ await telemetrySentPromise;
+});
+
+add_task(async function cleanup() {
+ const win = Services.wm.getMostRecentWindow("navigator:browser");
+ await BrowserTestUtils.closeWindow(win);
+});
diff --git a/toolkit/components/normandy/test/browser/browser_LegacyHeartbeat.js b/toolkit/components/normandy/test/browser/browser_LegacyHeartbeat.js
new file mode 100644
index 0000000000..465e5c1040
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_LegacyHeartbeat.js
@@ -0,0 +1,88 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { ClientEnvironment } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ClientEnvironment.sys.mjs"
+);
+const { Heartbeat } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Heartbeat.sys.mjs"
+);
+const { Normandy } = ChromeUtils.importESModule(
+ "resource://normandy/Normandy.sys.mjs"
+);
+const { ExperimentAPI } = ChromeUtils.importESModule(
+ "resource://nimbus/ExperimentAPI.sys.mjs"
+);
+const { ExperimentFakes } = ChromeUtils.importESModule(
+ "resource://testing-common/NimbusTestUtils.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+const { RemoteSettings } = ChromeUtils.importESModule(
+ "resource://services-settings/remote-settings.sys.mjs"
+);
+
+const SURVEY = {
+ surveyId: "a survey",
+ message: "test message",
+ engagementButtonLabel: "",
+ thanksMessage: "thanks!",
+ postAnswerUrl: "https://example.com",
+ learnMoreMessage: "Learn More",
+ learnMoreUrl: "https://example.com",
+ repeatOption: "once",
+};
+
+function assertSurvey(actual, expected) {
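+  // postAnswerUrl gains extra query parameters and flowId is not part of the expected survey, so skip exact comparison for them.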
+ for (const key of Object.keys(actual)) {
+ if (["postAnswerUrl", "flowId"].includes(key)) {
+ continue;
+ }
+
+ Assert.equal(
+ actual[key],
+ expected[key],
+ `Heartbeat should receive correct ${key} parameter`
+ );
+ }
+
+ Assert.ok(actual.postAnswerUrl.startsWith(expected.postAnswerUrl));
+}
+
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testLegacyHeartbeatTrigger({ heartbeatClassStub }) {
+ const sandbox = sinon.createSandbox();
+
+ const cleanupEnrollment = await ExperimentFakes.enrollWithFeatureConfig({
+ featureId: "legacyHeartbeat",
+ value: {
+ survey: SURVEY,
+ },
+ });
+
+ const client = RemoteSettings("normandy-recipes-capabilities");
+ sandbox.stub(client, "get").resolves([]);
+
+ try {
+ await RecipeRunner.run();
+ Assert.equal(
+ heartbeatClassStub.args.length,
+ 1,
+ "Heartbeat should be instantiated once"
+ );
+ assertSurvey(heartbeatClassStub.args[0][1], SURVEY);
+
+ await cleanupEnrollment();
+ } finally {
+ sandbox.restore();
+ }
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_LogManager.js b/toolkit/components/normandy/test/browser/browser_LogManager.js
new file mode 100644
index 0000000000..6f41b46c63
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_LogManager.js
@@ -0,0 +1,27 @@
+"use strict";
+
+const { LogManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/LogManager.sys.mjs"
+);
+
+add_task(async function () {
+ // Ensure that configuring the logger affects all generated loggers.
+ const firstLogger = LogManager.getLogger("first");
+ LogManager.configure(5);
+ const secondLogger = LogManager.getLogger("second");
+ is(firstLogger.level, 5, "First logger level inherited from root logger.");
+ is(secondLogger.level, 5, "Second logger level inherited from root logger.");
+
+ // Ensure that our loggers have at least one appender.
+ LogManager.configure(Log.Level.Warn);
+ const logger = LogManager.getLogger("test");
+ ok(!!logger.appenders.length, "Loggers have at least one appender.");
+
+ // Ensure our loggers log to the console.
+ await new Promise(resolve => {
+ SimpleTest.waitForExplicitFinish();
+ SimpleTest.monitorConsole(resolve, [{ message: /legend has it/ }]);
+ logger.warn("legend has it");
+ SimpleTest.endMonitorConsole();
+ });
+});
diff --git a/toolkit/components/normandy/test/browser/browser_Normandy.js b/toolkit/components/normandy/test/browser/browser_Normandy.js
new file mode 100644
index 0000000000..1480bd13a4
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_Normandy.js
@@ -0,0 +1,386 @@
+"use strict";
+
+const { TelemetryUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/TelemetryUtils.sys.mjs"
+);
+const { Normandy } = ChromeUtils.importESModule(
+ "resource://normandy/Normandy.sys.mjs"
+);
+const { AddonRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonRollouts.sys.mjs"
+);
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { PreferenceRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceRollouts.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+const {
+ NormandyTestUtils: { factories },
+} = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+const experimentPref1 = "test.initExperimentPrefs1";
+const experimentPref2 = "test.initExperimentPrefs2";
+const experimentPref3 = "test.initExperimentPrefs3";
+const experimentPref4 = "test.initExperimentPrefs4";
+
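+// Test decorator that stubs the init() of the Normandy subsystems so finishInit can run without side effects.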
+function withStubInits() {
+ return function (testFunction) {
+ return decorate(
+ withStub(AddonRollouts, "init"),
+ withStub(AddonStudies, "init"),
+ withStub(PreferenceRollouts, "init"),
+ withStub(PreferenceExperiments, "init"),
+ withStub(RecipeRunner, "init"),
+ withStub(TelemetryEvents, "init"),
+ testFunction
+ );
+ };
+}
+
+decorate_task(
+ withPrefEnv({
+ set: [
+ [`app.normandy.startupExperimentPrefs.${experimentPref1}`, true],
+ [`app.normandy.startupExperimentPrefs.${experimentPref2}`, 2],
+ [`app.normandy.startupExperimentPrefs.${experimentPref3}`, "string"],
+ ],
+ }),
+ async function testApplyStartupPrefs() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ for (const pref of [experimentPref1, experimentPref2, experimentPref3]) {
+ is(
+ defaultBranch.getPrefType(pref),
+ defaultBranch.PREF_INVALID,
+      `Pref ${pref} doesn't exist before being initialized.`
+ );
+ }
+
+ let oldValues = Normandy.applyStartupPrefs(
+ "app.normandy.startupExperimentPrefs."
+ );
+
+ Assert.deepEqual(
+ oldValues,
+ {
+ [experimentPref1]: null,
+ [experimentPref2]: null,
+ [experimentPref3]: null,
+ },
+ "the correct set of old values should be reported"
+ );
+
+ ok(
+ defaultBranch.getBoolPref(experimentPref1),
+ `Pref ${experimentPref1} has a default value after being initialized.`
+ );
+ is(
+ defaultBranch.getIntPref(experimentPref2),
+ 2,
+ `Pref ${experimentPref2} has a default value after being initialized.`
+ );
+ is(
+ defaultBranch.getCharPref(experimentPref3),
+ "string",
+ `Pref ${experimentPref3} has a default value after being initialized.`
+ );
+
+ for (const pref of [experimentPref1, experimentPref2, experimentPref3]) {
+ ok(
+ !defaultBranch.prefHasUserValue(pref),
+ `Pref ${pref} doesn't have a user value after being initialized.`
+ );
+ Services.prefs.clearUserPref(pref);
+ defaultBranch.deleteBranch(pref);
+ }
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["app.normandy.startupExperimentPrefs.test.existingPref", "experiment"],
+ ],
+ }),
+ async function testApplyStartupPrefsExisting() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ defaultBranch.setCharPref("test.existingPref", "default");
+ Normandy.applyStartupPrefs("app.normandy.startupExperimentPrefs.");
+ is(
+ defaultBranch.getCharPref("test.existingPref"),
+ "experiment",
+ "applyStartupPrefs overwrites the default values of existing preferences."
+ );
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["app.normandy.startupExperimentPrefs.test.mismatchPref", "experiment"],
+ ],
+ }),
+ async function testApplyStartupPrefsMismatch() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ defaultBranch.setIntPref("test.mismatchPref", 2);
+ Normandy.applyStartupPrefs("app.normandy.startupExperimentPrefs.");
+ is(
+ defaultBranch.getPrefType("test.mismatchPref"),
+ Services.prefs.PREF_INT,
+ "applyStartupPrefs skips prefs that don't match the existing default value's type."
+ );
+ }
+);
+
+decorate_task(
+ withStub(Normandy, "finishInit"),
+ async function testStartupDelayed({ finishInitStub }) {
+ let originalDeferred = Normandy.uiAvailableNotificationObserved;
+ let mockUiAvailableDeferred = PromiseUtils.defer();
+ Normandy.uiAvailableNotificationObserved = mockUiAvailableDeferred;
+
+ let initPromise = Normandy.init();
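+    // Yield to the event loop so Normandy.init() can progress until it awaits the UI-available notification.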
+ await null;
+
+ ok(
+ !finishInitStub.called,
+ "When initialized, do not call finishInit immediately."
+ );
+
+ Normandy.observe(null, "sessionstore-windows-restored");
+ await initPromise;
+ ok(
+ finishInitStub.called,
+ "Once the sessionstore-windows-restored event is observed, finishInit should be called."
+ );
+
+ Normandy.uiAvailableNotificationObserved = originalDeferred;
+ }
+);
+
+// During startup, preferences that are changed for experiments should
+// be recorded by calling PreferenceExperiments.recordOriginalValues.
+decorate_task(
+ withStub(PreferenceExperiments, "recordOriginalValues", {
+ as: "experimentsRecordOriginalValuesStub",
+ }),
+ withStub(PreferenceRollouts, "recordOriginalValues", {
+ as: "rolloutsRecordOriginalValueStub",
+ }),
+ async function testApplyStartupPrefs({
+ experimentsRecordOriginalValuesStub,
+ rolloutsRecordOriginalValueStub,
+ }) {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+
+ defaultBranch.setBoolPref(experimentPref1, false);
+ defaultBranch.setIntPref(experimentPref2, 1);
+ defaultBranch.setCharPref(experimentPref3, "original string");
+ // experimentPref4 is left unset
+
+ Normandy.applyStartupPrefs("app.normandy.startupExperimentPrefs.");
+ Normandy.studyPrefsChanged = { "test.study-pref": 1 };
+ Normandy.rolloutPrefsChanged = { "test.rollout-pref": 1 };
+ await Normandy.finishInit();
+
+ Assert.deepEqual(
+ experimentsRecordOriginalValuesStub.args,
+ [[{ "test.study-pref": 1 }]],
+ "finishInit should record original values of the study prefs"
+ );
+ Assert.deepEqual(
+ rolloutsRecordOriginalValueStub.args,
+ [[{ "test.rollout-pref": 1 }]],
+ "finishInit should record original values of the study prefs"
+ );
+
+ // cleanup
+ defaultBranch.deleteBranch(experimentPref1);
+ defaultBranch.deleteBranch(experimentPref2);
+ defaultBranch.deleteBranch(experimentPref3);
+ }
+);
+
+// Test that startup prefs are handled correctly when there is a value on the user branch but not the default branch.
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["app.normandy.startupExperimentPrefs.testing.does-not-exist", "foo"],
+ ["testing.does-not-exist", "foo"],
+ ],
+ }),
+ withStub(PreferenceExperiments, "recordOriginalValues"),
+ async function testApplyStartupPrefsNoDefaultValue() {
+ Normandy.applyStartupPrefs("app.normandy.startupExperimentPrefs");
+ ok(
+ true,
+ "initExperimentPrefs should not throw for prefs that doesn't exist on the default branch"
+ );
+ }
+);
+
+decorate_task(withStubInits(), async function testStartup() {
+ const initObserved = TestUtils.topicObserved("shield-init-complete");
+ await Normandy.finishInit();
+ ok(AddonStudies.init.called, "startup calls AddonStudies.init");
+ ok(
+ PreferenceExperiments.init.called,
+ "startup calls PreferenceExperiments.init"
+ );
+ ok(RecipeRunner.init.called, "startup calls RecipeRunner.init");
+ await initObserved;
+});
+
+decorate_task(withStubInits(), async function testStartupPrefInitFail() {
+ PreferenceExperiments.init.rejects();
+
+ await Normandy.finishInit();
+ ok(AddonStudies.init.called, "startup calls AddonStudies.init");
+ ok(AddonRollouts.init.called, "startup calls AddonRollouts.init");
+ ok(
+ PreferenceExperiments.init.called,
+ "startup calls PreferenceExperiments.init"
+ );
+ ok(RecipeRunner.init.called, "startup calls RecipeRunner.init");
+ ok(TelemetryEvents.init.called, "startup calls TelemetryEvents.init");
+ ok(PreferenceRollouts.init.called, "startup calls PreferenceRollouts.init");
+});
+
+decorate_task(
+ withStubInits(),
+ async function testStartupAddonStudiesInitFail() {
+ AddonStudies.init.rejects();
+
+ await Normandy.finishInit();
+ ok(AddonStudies.init.called, "startup calls AddonStudies.init");
+ ok(AddonRollouts.init.called, "startup calls AddonRollouts.init");
+ ok(
+ PreferenceExperiments.init.called,
+ "startup calls PreferenceExperiments.init"
+ );
+ ok(RecipeRunner.init.called, "startup calls RecipeRunner.init");
+ ok(TelemetryEvents.init.called, "startup calls TelemetryEvents.init");
+ ok(PreferenceRollouts.init.called, "startup calls PreferenceRollouts.init");
+ }
+);
+
+decorate_task(
+ withStubInits(),
+ async function testStartupTelemetryEventsInitFail() {
+ TelemetryEvents.init.throws();
+
+ await Normandy.finishInit();
+ ok(AddonStudies.init.called, "startup calls AddonStudies.init");
+ ok(AddonRollouts.init.called, "startup calls AddonRollouts.init");
+ ok(
+ PreferenceExperiments.init.called,
+ "startup calls PreferenceExperiments.init"
+ );
+ ok(RecipeRunner.init.called, "startup calls RecipeRunner.init");
+ ok(TelemetryEvents.init.called, "startup calls TelemetryEvents.init");
+ ok(PreferenceRollouts.init.called, "startup calls PreferenceRollouts.init");
+ }
+);
+
+decorate_task(
+ withStubInits(),
+ async function testStartupPreferenceRolloutsInitFail() {
+ PreferenceRollouts.init.throws();
+
+ await Normandy.finishInit();
+ ok(AddonStudies.init.called, "startup calls AddonStudies.init");
+ ok(AddonRollouts.init.called, "startup calls AddonRollouts.init");
+ ok(
+ PreferenceExperiments.init.called,
+ "startup calls PreferenceExperiments.init"
+ );
+ ok(RecipeRunner.init.called, "startup calls RecipeRunner.init");
+ ok(TelemetryEvents.init.called, "startup calls TelemetryEvents.init");
+ ok(PreferenceRollouts.init.called, "startup calls PreferenceRollouts.init");
+ }
+);
+
+// Test that disabling telemetry removes all stored enrollment IDs
+decorate_task(
+ PreferenceExperiments.withMockExperiments([
+ factories.preferenceStudyFactory({
+ enrollmentId: "test-enrollment-id",
+ }),
+ ]),
+ AddonStudies.withStudies([
+ factories.addonStudyFactory({ slug: "test-study" }),
+ ]),
+ PreferenceRollouts.withTestMock(),
+ AddonRollouts.withTestMock(),
+ async function disablingTelemetryClearsEnrollmentIds({
+ prefExperiments: [prefExperiment],
+ addonStudies: [addonStudy],
+ }) {
+ const prefRollout = {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [],
+ enrollmentId: "test-enrollment-id",
+ };
+ await PreferenceRollouts.add(prefRollout);
+ const addonRollout = {
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extension: {},
+ enrollmentId: "test-enrollment-id",
+ };
+ await AddonRollouts.add(addonRollout);
+
+ // pre-check
+ ok(
+ (await PreferenceExperiments.get(prefExperiment.slug)).enrollmentId,
+ "pref experiment should have an enrollment id"
+ );
+ ok(
+ (await AddonStudies.get(addonStudy.recipeId)).enrollmentId,
+ "addon study should have an enrollment id"
+ );
+ ok(
+ (await PreferenceRollouts.get(prefRollout.slug)).enrollmentId,
+ "pref rollout should have an enrollment id"
+ );
+ ok(
+ (await AddonRollouts.get(addonRollout.slug)).enrollmentId,
+ "addon rollout should have an enrollment id"
+ );
+
+ // trigger telemetry being disabled
+ await Normandy.observe(
+ null,
+ TelemetryUtils.TELEMETRY_UPLOAD_DISABLED_TOPIC,
+ null
+ );
+
+ // no enrollment IDs anymore
+ is(
+ (await PreferenceExperiments.get(prefExperiment.slug)).enrollmentId,
+ TelemetryEvents.NO_ENROLLMENT_ID_MARKER,
+ "pref experiment should not have an enrollment id"
+ );
+ is(
+ (await AddonStudies.get(addonStudy.recipeId)).enrollmentId,
+ TelemetryEvents.NO_ENROLLMENT_ID_MARKER,
+ "addon study should not have an enrollment id"
+ );
+ is(
+ (await PreferenceRollouts.get(prefRollout.slug)).enrollmentId,
+ TelemetryEvents.NO_ENROLLMENT_ID_MARKER,
+ "pref rollout should not have an enrollment id"
+ );
+ is(
+ (await AddonRollouts.get(addonRollout.slug)).enrollmentId,
+ TelemetryEvents.NO_ENROLLMENT_ID_MARKER,
+ "addon rollout should not have an enrollment id"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_NormandyAddonManager.js b/toolkit/components/normandy/test/browser/browser_NormandyAddonManager.js
new file mode 100644
index 0000000000..fe62f557e2
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_NormandyAddonManager.js
@@ -0,0 +1,189 @@
+"use strict";
+
+const { NormandyAddonManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/NormandyAddonManager.sys.mjs"
+);
+
+decorate_task(ensureAddonCleanup(), async function download_and_install() {
+ const applyDeferred = PromiseUtils.defer();
+
+ const [addonId, addonVersion] = await NormandyAddonManager.downloadAndInstall(
+ {
+ extensionDetails: {
+ extension_id: FIXTURE_ADDON_ID,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ hash_algorithm: "sha256",
+ version: "1.0",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ },
+ applyNormandyChanges: () => {
+ applyDeferred.resolve();
+ },
+ createError: () => {},
+ reportError: () => {},
+ undoNormandyChanges: () => {},
+ }
+ );
+
+ // Ensure applyNormandyChanges was called
+ await applyDeferred;
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, addonId, "add-on is installed");
+ is(addon.version, addonVersion, "add-on version is correct");
+
+ // Cleanup
+ await addon.uninstall();
+});
+
+decorate_task(ensureAddonCleanup(), async function id_mismatch() {
+ const applyDeferred = PromiseUtils.defer();
+ const undoDeferred = PromiseUtils.defer();
+
+ let error;
+
+ try {
+ await NormandyAddonManager.downloadAndInstall({
+ extensionDetails: {
+ extension_id: FIXTURE_ADDON_ID,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].hash,
+ hash_algorithm: "sha256",
+ version: "1.0",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].url,
+ },
+ applyNormandyChanges: () => {
+ applyDeferred.resolve();
+ },
+ createError: (reason, extra) => {
+ return [reason, extra];
+ },
+ reportError: err => {
+ return err;
+ },
+ undoNormandyChanges: () => {
+ undoDeferred.resolve();
+ },
+ });
+ } catch ([reason, extra]) {
+ error = true;
+ is(reason, "metadata-mismatch", "the expected reason is provided");
+ Assert.deepEqual(
+ extra,
+ undefined,
+ "the expected extra details are provided"
+ );
+ }
+
+ ok(error, "an error occured");
+
+ // Ensure applyNormandyChanges was called
+ await applyDeferred;
+
+ // Ensure undoNormandyChanges was called
+ await undoDeferred;
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "add-on is not installed");
+});
+
+decorate_task(ensureAddonCleanup(), async function version_mismatch() {
+ const applyDeferred = PromiseUtils.defer();
+ const undoDeferred = PromiseUtils.defer();
+
+ let error;
+
+ try {
+ await NormandyAddonManager.downloadAndInstall({
+ extensionDetails: {
+ extension_id: FIXTURE_ADDON_ID,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ hash_algorithm: "sha256",
+ version: "2.0",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ },
+ applyNormandyChanges: () => {
+ applyDeferred.resolve();
+ },
+ createError: (reason, extra) => {
+ return [reason, extra];
+ },
+ reportError: err => {
+ return err;
+ },
+ undoNormandyChanges: () => {
+ undoDeferred.resolve();
+ },
+ });
+ } catch ([reason, extra]) {
+ error = true;
+ is(reason, "metadata-mismatch", "the expected reason is provided");
+ Assert.deepEqual(
+ extra,
+ undefined,
+ "the expected extra details are provided"
+ );
+ }
+
+ ok(error, "should throw an error");
+
+ // Ensure applyNormandyChanges was called
+ await applyDeferred;
+
+ // Ensure undoNormandyChanges was called
+ await undoDeferred;
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "add-on is not installed");
+});
+
+decorate_task(ensureAddonCleanup(), async function download_failure() {
+ const applyDeferred = PromiseUtils.defer();
+ const undoDeferred = PromiseUtils.defer();
+
+ let error;
+
+ try {
+ await NormandyAddonManager.downloadAndInstall({
+ extensionDetails: {
+ extension_id: FIXTURE_ADDON_ID,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].hash,
+ hash_algorithm: "sha256",
+ version: "1.0",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ },
+ applyNormandyChanges: () => {
+ applyDeferred.resolve();
+ },
+ createError: (reason, extra) => {
+ return [reason, extra];
+ },
+ reportError: err => {
+ return err;
+ },
+ undoNormandyChanges: () => {
+ undoDeferred.resolve();
+ },
+ });
+ } catch ([reason, extra]) {
+ error = true;
+ is(reason, "download-failure", "the expected reason is provided");
+ Assert.deepEqual(
+ extra,
+ {
+ detail: "ERROR_INCORRECT_HASH",
+ },
+ "the expected extra details are provided"
+ );
+ }
+
+ ok(error, "an error occured");
+
+ // Ensure applyNormandyChanges was called
+ await applyDeferred;
+
+ // Ensure undoNormandyChanges was called
+ await undoDeferred;
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "add-on is not installed");
+});
diff --git a/toolkit/components/normandy/test/browser/browser_NormandyMigrations.js b/toolkit/components/normandy/test/browser/browser_NormandyMigrations.js
new file mode 100644
index 0000000000..9e60219c8b
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_NormandyMigrations.js
@@ -0,0 +1,106 @@
+const { NormandyMigrations } = ChromeUtils.importESModule(
+ "resource://normandy/NormandyMigrations.sys.mjs"
+);
+
+decorate_task(
+ withMockPreferences(),
+ async function testApplyMigrations({ mockPreferences }) {
+ const migrationsAppliedPref = "app.normandy.migrationsApplied";
+ mockPreferences.set(migrationsAppliedPref, 0);
+
+ await NormandyMigrations.applyAll();
+
+ is(
+ Services.prefs.getIntPref(migrationsAppliedPref),
+ NormandyMigrations.migrations.length,
+ "All migrations should have been applied"
+ );
+ }
+);
+
+decorate_task(
+ withMockPreferences(),
+ async function testPrefMigration({ mockPreferences }) {
+ const legacyPref = "extensions.shield-recipe-client.test";
+ const migratedPref = "app.normandy.test";
+ mockPreferences.set(legacyPref, 1);
+
+ ok(
+ Services.prefs.prefHasUserValue(legacyPref),
+ "Legacy pref should have a user value before running migration"
+ );
+ ok(
+ !Services.prefs.prefHasUserValue(migratedPref),
+ "Migrated pref should not have a user value before running migration"
+ );
+
+ await NormandyMigrations.applyOne(0);
+
+ ok(
+ !Services.prefs.prefHasUserValue(legacyPref),
+ "Legacy pref should not have a user value after running migration"
+ );
+ ok(
+ Services.prefs.prefHasUserValue(migratedPref),
+ "Migrated pref should have a user value after running migration"
+ );
+ is(
+ Services.prefs.getIntPref(migratedPref),
+ 1,
+ "Value should have been migrated"
+ );
+
+ Services.prefs.clearUserPref(migratedPref);
+ }
+);
+
+decorate_task(
+ withMockPreferences(),
+ async function testMigration0({ mockPreferences }) {
+ const studiesEnabledPref = "app.shield.optoutstudies.enabled";
+ const healthReportUploadEnabledPref =
+ "datareporting.healthreport.uploadEnabled";
+
+ // Both enabled
+ mockPreferences.set(studiesEnabledPref, true);
+ mockPreferences.set(healthReportUploadEnabledPref, true);
+ await NormandyMigrations.applyOne(1);
+ ok(
+ Services.prefs.getBoolPref(studiesEnabledPref),
+ "Studies should be enabled."
+ );
+
+ mockPreferences.cleanup();
+
+ // Telemetry disabled, studies enabled
+ mockPreferences.set(studiesEnabledPref, true);
+ mockPreferences.set(healthReportUploadEnabledPref, false);
+ await NormandyMigrations.applyOne(1);
+ ok(
+ !Services.prefs.getBoolPref(studiesEnabledPref),
+ "Studies should be disabled."
+ );
+
+ mockPreferences.cleanup();
+
+ // Telemetry enabled, studies disabled
+ mockPreferences.set(studiesEnabledPref, false);
+ mockPreferences.set(healthReportUploadEnabledPref, true);
+ await NormandyMigrations.applyOne(1);
+ ok(
+ !Services.prefs.getBoolPref(studiesEnabledPref),
+ "Studies should be disabled."
+ );
+
+ mockPreferences.cleanup();
+
+ // Both disabled
+ mockPreferences.set(studiesEnabledPref, false);
+ mockPreferences.set(healthReportUploadEnabledPref, false);
+ await NormandyMigrations.applyOne(1);
+ ok(
+ !Services.prefs.getBoolPref(studiesEnabledPref),
+ "Studies should be disabled."
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_PreferenceExperiments.js b/toolkit/components/normandy/test/browser/browser_PreferenceExperiments.js
new file mode 100644
index 0000000000..80c3cd79f2
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_PreferenceExperiments.js
@@ -0,0 +1,2205 @@
+"use strict";
+
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { CleanupManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/CleanupManager.sys.mjs"
+);
+const { NormandyUtils } = ChromeUtils.importESModule(
+ "resource://normandy/lib/NormandyUtils.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+// Save ourselves some typing
+const { withMockExperiments } = PreferenceExperiments;
+const DefaultPreferences = new Preferences({ defaultBranch: true });
+const startupPrefs = "app.normandy.startupExperimentPrefs";
+const { preferenceStudyFactory } = NormandyTestUtils.factories;
+
+const NOW = new Date();
+
+const mockV1Data = {
+ hypothetical_experiment: {
+ name: "hypothetical_experiment",
+ branch: "hypo_1",
+ expired: false,
+ lastSeen: NOW.toJSON(),
+ preferenceName: "some.pref",
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "user",
+ experimentType: "exp",
+ },
+ another_experiment: {
+ name: "another_experiment",
+ branch: "another_4",
+ expired: true,
+ lastSeen: NOW.toJSON(),
+ preferenceName: "another.pref",
+ preferenceValue: true,
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceBranchType: "default",
+ experimentType: "exp",
+ },
+};
+
+const mockV2Data = {
+ experiments: {
+ hypothetical_experiment: {
+ name: "hypothetical_experiment",
+ branch: "hypo_1",
+ expired: false,
+ lastSeen: NOW.toJSON(),
+ preferenceName: "some.pref",
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "user",
+ experimentType: "exp",
+ },
+ another_experiment: {
+ name: "another_experiment",
+ branch: "another_4",
+ expired: true,
+ lastSeen: NOW.toJSON(),
+ preferenceName: "another.pref",
+ preferenceValue: true,
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceBranchType: "default",
+ experimentType: "exp",
+ },
+ },
+};
+
+const mockV3Data = {
+ experiments: {
+ hypothetical_experiment: {
+ name: "hypothetical_experiment",
+ branch: "hypo_1",
+ expired: false,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "some.pref": {
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "user",
+ },
+ },
+ experimentType: "exp",
+ },
+ another_experiment: {
+ name: "another_experiment",
+ branch: "another_4",
+ expired: true,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "another.pref": {
+ preferenceValue: true,
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "exp",
+ },
+ },
+};
+
+const mockV4Data = {
+ experiments: {
+ hypothetical_experiment: {
+ name: "hypothetical_experiment",
+ branch: "hypo_1",
+ actionName: "SinglePreferenceExperimentAction",
+ expired: false,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "some.pref": {
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "user",
+ },
+ },
+ experimentType: "exp",
+ },
+ another_experiment: {
+ name: "another_experiment",
+ branch: "another_4",
+ actionName: "SinglePreferenceExperimentAction",
+ expired: true,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "another.pref": {
+ preferenceValue: true,
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "exp",
+ },
+ },
+};
+
+const mockV5Data = {
+ experiments: {
+ hypothetical_experiment: {
+ slug: "hypothetical_experiment",
+ branch: "hypo_1",
+ actionName: "SinglePreferenceExperimentAction",
+ expired: false,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "some.pref": {
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "user",
+ },
+ },
+ experimentType: "exp",
+ },
+ another_experiment: {
+ slug: "another_experiment",
+ branch: "another_4",
+ actionName: "SinglePreferenceExperimentAction",
+ expired: true,
+ lastSeen: NOW.toJSON(),
+ preferences: {
+ "another.pref": {
+ preferenceValue: true,
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "exp",
+ },
+ },
+};
+
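+// Each entry pairs a storage migration with the data it receives (dataBefore) and the data it should produce (dataAfter).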
+const migrationsInfo = [
+ {
+ migration: PreferenceExperiments.migrations.migration01MoveExperiments,
+ dataBefore: mockV1Data,
+ dataAfter: mockV2Data,
+ },
+ {
+ migration: PreferenceExperiments.migrations.migration02MultiPreference,
+ dataBefore: mockV2Data,
+ dataAfter: mockV3Data,
+ },
+ {
+ migration: PreferenceExperiments.migrations.migration03AddActionName,
+ dataBefore: mockV3Data,
+ dataAfter: mockV4Data,
+ },
+ {
+ migration: PreferenceExperiments.migrations.migration04RenameNameToSlug,
+ dataBefore: mockV4Data,
+ dataAfter: mockV5Data,
+ },
+  // Migration 5 is not a simple data migration. This style of test does not apply to it.
+];
+
+/**
+ * Make a mock `JsonFile` object with a no-op `saveSoon` method and a deep copy
+ * of the data passed.
+ * @param {Object} data the data in the store
+ */
+function makeMockJsonFile(data = {}) {
+ return {
+ // Deep clone the data in case migrations mutate it.
+ data: JSON.parse(JSON.stringify(data)),
+ saveSoon: () => {},
+ };
+}
+
+/** Test that each migration results in the expected data */
+add_task(async function test_migrations() {
+ for (const { migration, dataAfter, dataBefore } of migrationsInfo) {
+ let mockJsonFile = makeMockJsonFile(dataBefore);
+ await migration(mockJsonFile);
+ Assert.deepEqual(
+ mockJsonFile.data,
+ dataAfter,
+ `Migration ${migration.name} should result in the expected data`
+ );
+ }
+});
+
+add_task(async function migrations_are_idempotent() {
+ for (const { migration, dataBefore } of migrationsInfo) {
+ const mockJsonFileOnce = makeMockJsonFile(dataBefore);
+ const mockJsonFileTwice = makeMockJsonFile(dataBefore);
+ await migration(mockJsonFileOnce);
+ await migration(mockJsonFileTwice);
+ await migration(mockJsonFileTwice);
+ Assert.deepEqual(
+ mockJsonFileOnce.data,
+ mockJsonFileTwice.data,
+ "migrating data twice should be idempotent for " + migration.name
+ );
+ }
+});
+
+add_task(async function migration03KeepsActionName() {
+ let mockData = JSON.parse(JSON.stringify(mockV3Data));
+ mockData.experiments.another_experiment.actionName = "SomeOldAction";
+ const mockJsonFile = makeMockJsonFile(mockData);
+ // Output should be the same as mockV4Data, but preserving the action.
+ const migratedData = JSON.parse(JSON.stringify(mockV4Data));
+ migratedData.experiments.another_experiment.actionName = "SomeOldAction";
+
+ await PreferenceExperiments.migrations.migration03AddActionName(mockJsonFile);
+ Assert.deepEqual(mockJsonFile.data, migratedData);
+});
+
+// Test that migration 5 works as expected
+decorate_task(
+ withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ actionName: "PreferenceExperimentAction",
+ expired: false,
+ }),
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ actionName: "SinglePreferenceExperimentAction",
+ expired: false,
+ }),
+ ]),
+ async function migration05Works({ prefExperiments: [expKeep, expExpire] }) {
+ // pre check
+ const activeSlugsBefore = (await PreferenceExperiments.getAllActive()).map(
+ e => e.slug
+ );
+ Assert.deepEqual(
+ activeSlugsBefore,
+ [expKeep.slug, expExpire.slug],
+ "Both experiments should be present and active before the migration"
+ );
+
+ // run the migration
+ await PreferenceExperiments.migrations.migration05RemoveOldAction();
+
+ // verify behavior
+ const activeSlugsAfter = (await PreferenceExperiments.getAllActive()).map(
+ e => e.slug
+ );
+ Assert.deepEqual(
+ activeSlugsAfter,
+ [expKeep.slug],
+ "The single pref experiment should be ended by the migration"
+ );
+ const allSlugsAfter = (await PreferenceExperiments.getAll()).map(
+ e => e.slug
+ );
+ Assert.deepEqual(
+ allSlugsAfter,
+ [expKeep.slug, expExpire.slug],
+ "Both experiments should still exist after the migration"
+ );
+ }
+);
+
+// clearAllExperimentStorage
+decorate_task(
+ withMockExperiments([preferenceStudyFactory({ slug: "test" })]),
+ async function ({ prefExperiments }) {
+ ok(await PreferenceExperiments.has("test"), "Mock experiment is detected.");
+ await PreferenceExperiments.clearAllExperimentStorage();
+ ok(
+ !(await PreferenceExperiments.has("test")),
+ "clearAllExperimentStorage removed all stored experiments"
+ );
+ }
+);
+
+// start should throw if an experiment with the given name already exists
+decorate_task(
+ withMockExperiments([preferenceStudyFactory({ slug: "test" })]),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ await Assert.rejects(
+ PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ }),
+ /test.*already exists/,
+ "start threw an error due to a conflicting experiment name"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enrollFailed", "preference_study", "test", { reason: "name-conflict" }],
+ ]);
+ }
+);
+
+// start should throw if an experiment for any of the given
+// preferences is already active
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ preferences: { "fake.preferenceinteger": {} },
+ }),
+ ]),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ await Assert.rejects(
+ PreferenceExperiments.start({
+ slug: "different",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ "fake.preferenceinteger": {
+ preferenceValue: 2,
+ preferenceType: "integer",
+ preferenceBranchType: "default",
+ },
+ },
+ }),
+ /another.*is currently active/i,
+ "start threw an error due to an active experiment for the given preference"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "preference_study",
+ "different",
+ { reason: "pref-conflict" },
+ ],
+ ]);
+ }
+);
+
+// start should throw if an invalid preferenceBranchType is given
+decorate_task(
+ withMockExperiments(),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ await Assert.rejects(
+ PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "value",
+ preferenceType: "string",
+ preferenceBranchType: "invalid",
+ },
+ },
+ }),
+ /invalid value for preferenceBranchType: invalid/i,
+ "start threw an error due to an invalid preference branch type"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "preference_study",
+ "test",
+ { reason: "invalid-branch" },
+ ],
+ ]);
+ }
+);
+
+// start should save experiment data, modify preferences, and register a
+// watcher.
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "startObserver"),
+ withSendEventSpy(),
+ async function testStart({
+ prefExperiments,
+ mockPreferences,
+ startObserverStub,
+ sendEventSpy,
+ }) {
+ mockPreferences.set("fake.preference", "oldvalue", "default");
+ mockPreferences.set("fake.preference", "uservalue", "user");
+ mockPreferences.set("fake.preferenceinteger", 1, "default");
+ mockPreferences.set("fake.preferenceinteger", 101, "user");
+
+ const experiment = {
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "newvalue",
+ preferenceBranchType: "default",
+ preferenceType: "string",
+ },
+ "fake.preferenceinteger": {
+ preferenceValue: 2,
+ preferenceBranchType: "default",
+ preferenceType: "integer",
+ },
+ },
+ };
+ await PreferenceExperiments.start(experiment);
+ ok(await PreferenceExperiments.get("test"), "start saved the experiment");
+ ok(
+ startObserverStub.calledWith("test", experiment.preferences),
+ "start registered an observer"
+ );
+
+ const expectedExperiment = {
+ slug: "test",
+ branch: "branch",
+ expired: false,
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "newvalue",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ preferenceBranchType: "default",
+ overridden: true,
+ },
+ "fake.preferenceinteger": {
+ preferenceValue: 2,
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceBranchType: "default",
+ overridden: true,
+ },
+ },
+ };
+ const experimentSubset = {};
+ const actualExperiment = await PreferenceExperiments.get("test");
+ Object.keys(expectedExperiment).forEach(
+ key => (experimentSubset[key] = actualExperiment[key])
+ );
+ Assert.deepEqual(
+ experimentSubset,
+ expectedExperiment,
+ "start saved the experiment"
+ );
+
+ is(
+ DefaultPreferences.get("fake.preference"),
+ "newvalue",
+ "start modified the default preference"
+ );
+ is(
+ Preferences.get("fake.preference"),
+ "uservalue",
+ "start did not modify the user preference"
+ );
+ is(
+ Preferences.get(`${startupPrefs}.fake.preference`),
+ "newvalue",
+ "start saved the experiment value to the startup prefs tree"
+ );
+ is(
+ DefaultPreferences.get("fake.preferenceinteger"),
+ 2,
+ "start modified the default preference"
+ );
+ is(
+ Preferences.get("fake.preferenceinteger"),
+ 101,
+ "start did not modify the user preference"
+ );
+ is(
+ Preferences.get(`${startupPrefs}.fake.preferenceinteger`),
+ 2,
+ "start saved the experiment value to the startup prefs tree"
+ );
+ }
+);
+
+// start should modify the user preference for the user branch type
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "startObserver"),
+ async function ({ mockPreferences, startObserverStub }) {
+ mockPreferences.set("fake.preference", "olddefaultvalue", "default");
+ mockPreferences.set("fake.preference", "oldvalue", "user");
+
+ const experiment = {
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "newvalue",
+ preferenceType: "string",
+ preferenceBranchType: "user",
+ },
+ },
+ };
+ await PreferenceExperiments.start(experiment);
+ ok(
+ startObserverStub.calledWith("test", experiment.preferences),
+ "start registered an observer"
+ );
+
+ const expectedExperiment = {
+ slug: "test",
+ branch: "branch",
+ expired: false,
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "newvalue",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ preferenceBranchType: "user",
+ },
+ },
+ };
+
+ const experimentSubset = {};
+ const actualExperiment = await PreferenceExperiments.get("test");
+ Object.keys(expectedExperiment).forEach(
+ key => (experimentSubset[key] = actualExperiment[key])
+ );
+ Assert.deepEqual(
+ experimentSubset,
+ expectedExperiment,
+ "start saved the experiment"
+ );
+
+ Assert.notEqual(
+ DefaultPreferences.get("fake.preference"),
+ "newvalue",
+ "start did not modify the default preference"
+ );
+ is(
+ Preferences.get("fake.preference"),
+ "newvalue",
+ "start modified the user preference"
+ );
+ }
+);
+
+// start should throw if the new preference value's type does not match the previous value's type
+decorate_task(
+ withMockPreferences(),
+ withSendEventSpy(),
+ async function ({ mockPreferences, sendEventSpy }) {
+ mockPreferences.set("fake.type_preference", "oldvalue");
+
+ await Assert.rejects(
+ PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.type_preference": {
+ preferenceBranchType: "user",
+ preferenceValue: 12345,
+ preferenceType: "integer",
+ },
+ },
+ }),
+ /previous preference value is of type/i,
+ "start threw error for incompatible preference type"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enrollFailed", "preference_study", "test", { reason: "invalid-type" }],
+ ]);
+ }
+);
+
+// startObserver should throw if an observer for the experiment is already
+// active.
+decorate_task(withMockExperiments(), async function () {
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "newvalue",
+ },
+ });
+ Assert.throws(
+ () =>
+ PreferenceExperiments.startObserver("test", {
+ "another.fake": {
+ preferenceType: "string",
+ preferenceValue: "othervalue",
+ },
+ }),
+ /observer.*is already active/i,
+ "startObservers threw due to a conflicting active observer"
+ );
+ PreferenceExperiments.stopAllObservers();
+});
+
+// startObserver should register an observer that sends an event when preference
+// changes from its experimental value.
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "recordPrefChange"),
+ async function testObserversCanObserveChanges({
+ mockPreferences,
+ recordPrefChangeStub,
+ }) {
+ const preferences = {
+ "fake.preferencestring": {
+ preferenceType: "string",
+ previousPreferenceValue: "startvalue",
+ preferenceValue: "experimentvalue",
+      }, // "newvalue"
+ "fake.preferenceboolean": {
+ preferenceType: "boolean",
+ previousPreferenceValue: false,
+ preferenceValue: true,
+ }, // false
+ "fake.preferenceinteger": {
+ preferenceType: "integer",
+ previousPreferenceValue: 1,
+ preferenceValue: 2,
+ }, // 42
+ };
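+    // For each preference, a value that differs from its experimental value; setting it should trigger recordPrefChange.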
+ const newValues = {
+ "fake.preferencestring": "newvalue",
+ "fake.preferenceboolean": false,
+ "fake.preferenceinteger": 42,
+ };
+
+ for (const [testPref, newValue] of Object.entries(newValues)) {
+ const experimentSlug = "test-" + testPref;
+ for (const [prefName, prefInfo] of Object.entries(preferences)) {
+ mockPreferences.set(prefName, prefInfo.previousPreferenceValue);
+ }
+
+ // NOTE: startObserver does not modify the pref
+ PreferenceExperiments.startObserver(experimentSlug, preferences);
+
+ // Setting it to the experimental value should not trigger the call.
+ for (const [prefName, prefInfo] of Object.entries(preferences)) {
+ mockPreferences.set(prefName, prefInfo.preferenceValue);
+ ok(
+ !recordPrefChangeStub.called,
+ "Changing to the experimental pref value did not trigger the observer"
+ );
+ }
+
+ // Setting it to something different should trigger the call.
+ mockPreferences.set(testPref, newValue);
+ Assert.deepEqual(
+ recordPrefChangeStub.args,
+ [[{ experimentSlug, preferenceName: testPref, reason: "observer" }]],
+ "Changing to a different value triggered the observer"
+ );
+
+ PreferenceExperiments.stopAllObservers();
+ recordPrefChangeStub.resetHistory();
+ }
+ }
+);
+
+// Changes to prefs that have an experimental pref as a prefix should not trigger the observer.
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "recordPrefChange"),
+ async function testObserversCanObserveChanges({
+ mockPreferences,
+ recordPrefChangeStub,
+ }) {
+ const preferences = {
+ "fake.preference": {
+ preferenceType: "string",
+ previousPreferenceValue: "startvalue",
+ preferenceValue: "experimentvalue",
+ },
+ };
+
+ const experimentSlug = "test-prefix";
+ for (const [prefName, prefInfo] of Object.entries(preferences)) {
+ mockPreferences.set(prefName, prefInfo.preferenceValue);
+ }
+ PreferenceExperiments.startObserver(experimentSlug, preferences);
+
+ // Changing a preference that has the experimental pref as a prefix should
+ // not trigger the observer.
+ mockPreferences.set("fake.preference.extra", "value");
+ // Setting it to the experimental value should not trigger the call.
+ ok(
+ !recordPrefChangeStub.called,
+ "Changing to the experimental pref value did not trigger the observer"
+ );
+
+ PreferenceExperiments.stopAllObservers();
+ }
+);
+
+decorate_task(withMockExperiments(), async function testHasObserver() {
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentValue",
+ },
+ });
+
+ ok(
+ await PreferenceExperiments.hasObserver("test"),
+ "hasObserver should detect active observers"
+ );
+ ok(
+ !(await PreferenceExperiments.hasObserver("missing")),
+ "hasObserver shouldn't detect inactive observers"
+ );
+
+ PreferenceExperiments.stopAllObservers();
+});
+
+// stopObserver should throw if there is no observer active for it to stop.
+decorate_task(withMockExperiments(), async function () {
+ Assert.throws(
+ () => PreferenceExperiments.stopObserver("neveractive"),
+ /no observer.*found/i,
+ "stopObserver threw because there was not matching active observer"
+ );
+});
+
+// stopObserver should cancel active observers.
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stop", { returnValue: Promise.resolve() }),
+ async function ({ mockPreferences, stopStub }) {
+ const preferenceInfo = {
+ "fake.preferencestring": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ "fake.preferenceinteger": {
+ preferenceType: "integer",
+ preferenceValue: 2,
+ },
+ };
+ mockPreferences.set("fake.preference", "startvalue");
+
+ PreferenceExperiments.startObserver("test", preferenceInfo);
+ PreferenceExperiments.stopObserver("test");
+
+ // Setting the preference now that the observer is stopped should not call
+ // stop.
+ mockPreferences.set("fake.preferencestring", "newvalue");
+ ok(
+ !stopStub.called,
+ "stopObserver successfully removed the observer for string"
+ );
+
+ mockPreferences.set("fake.preferenceinteger", 42);
+ ok(
+ !stopStub.called,
+ "stopObserver successfully removed the observer for integer"
+ );
+
+ // Now that the observer is stopped, start should be able to start a new one
+ // without throwing.
+ try {
+ PreferenceExperiments.startObserver("test", preferenceInfo);
+ } catch (err) {
+ ok(
+ false,
+ "startObserver did not throw an error for an observer that was already stopped"
+ );
+ }
+
+ PreferenceExperiments.stopAllObservers();
+ }
+);
+
+// stopAllObservers
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stop", { returnValue: Promise.resolve() }),
+ async function ({ mockPreferences, stopStub }) {
+ mockPreferences.set("fake.preference", "startvalue");
+ mockPreferences.set("other.fake.preference", "startvalue");
+
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+ PreferenceExperiments.startObserver("test2", {
+ "other.fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+ PreferenceExperiments.stopAllObservers();
+
+ // Setting the preference now that the observers are stopped should not call
+ // stop.
+ mockPreferences.set("fake.preference", "newvalue");
+ mockPreferences.set("other.fake.preference", "newvalue");
+ ok(!stopStub.called, "stopAllObservers successfully removed all observers");
+
+ // Now that the observers are stopped, start should be able to start new
+ // observers without throwing.
+ try {
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+ PreferenceExperiments.startObserver("test2", {
+ "other.fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+ } catch (err) {
+ ok(
+ false,
+ "startObserver did not throw an error for an observer that was already stopped"
+ );
+ }
+
+ PreferenceExperiments.stopAllObservers();
+ }
+);
+
+// markLastSeen should throw if it can't find a matching experiment
+decorate_task(withMockExperiments(), async function () {
+ await Assert.rejects(
+ PreferenceExperiments.markLastSeen("neveractive"),
+ /could not find/i,
+ "markLastSeen threw because there was not a matching experiment"
+ );
+});
+
+// markLastSeen should update the lastSeen date
+const oldDate = new Date(1988, 10, 1).toJSON();
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({ slug: "test", lastSeen: oldDate }),
+ ]),
+ async function ({ prefExperiments: [experiment] }) {
+ await PreferenceExperiments.markLastSeen("test");
+ Assert.notEqual(
+ experiment.lastSeen,
+ oldDate,
+ "markLastSeen updated the experiment lastSeen date"
+ );
+ }
+);
+
+// stop should throw if an experiment with the given name doesn't exist
+decorate_task(
+ withMockExperiments(),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ await Assert.rejects(
+ PreferenceExperiments.stop("test"),
+ /could not find/i,
+ "stop threw an error because there are no experiments with the given name"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "unenrollFailed",
+ "preference_study",
+ "test",
+ { reason: "does-not-exist" },
+ ],
+ ]);
+ }
+);
+
+// stop should throw if the experiment is already expired
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({ slug: "test", expired: true }),
+ ]),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ await Assert.rejects(
+ PreferenceExperiments.stop("test"),
+ /already expired/,
+ "stop threw an error because the experiment was already expired"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "unenrollFailed",
+ "preference_study",
+ "test",
+ { reason: "already-unenrolled" },
+ ],
+ ]);
+ }
+);
+
+// stop should mark the experiment as expired, stop its observer, and revert the
+// preference value.
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ expired: false,
+ branch: "fakebranch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experimentvalue",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ preferenceBranchType: "default",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withSpy(PreferenceExperiments, "stopObserver"),
+ withSendEventSpy(),
+ async function testStop({ mockPreferences, stopObserverSpy, sendEventSpy }) {
+ // this assertion is mostly useful for --verify test runs, to make
+ // sure that tests clean up correctly.
+ ok(!Preferences.get("fake.preference"), "preference should start unset");
+
+ mockPreferences.set(
+ `${startupPrefs}.fake.preference`,
+ "experimentvalue",
+ "user"
+ );
+ mockPreferences.set("fake.preference", "experimentvalue", "default");
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+
+ await PreferenceExperiments.stop("test", { reason: "test-reason" });
+ ok(stopObserverSpy.calledWith("test"), "stop removed an observer");
+ const experiment = await PreferenceExperiments.get("test");
+ is(experiment.expired, true, "stop marked the experiment as expired");
+ is(
+ DefaultPreferences.get("fake.preference"),
+ "oldvalue",
+ "stop reverted the preference to its previous value"
+ );
+ ok(
+ !Services.prefs.prefHasUserValue(`${startupPrefs}.fake.preference`),
+ "stop cleared the startup preference for fake.preference."
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "preference_study",
+ "test",
+ {
+ didResetValue: "true",
+ reason: "test-reason",
+ branch: "fakebranch",
+ },
+ ],
+ ]);
+
+ PreferenceExperiments.stopAllObservers();
+ }
+);
+
+// stop should also support user pref experiments
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ expired: false,
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experimentvalue",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ preferenceBranchType: "user",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stopObserver"),
+ withStub(PreferenceExperiments, "hasObserver"),
+ async function testStopUserPrefs({
+ mockPreferences,
+ stopObserverStub,
+ hasObserverStub,
+ }) {
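+    // Pretend an observer is already registered so stop() will try to remove it.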
+ hasObserverStub.returns(true);
+
+ mockPreferences.set("fake.preference", "experimentvalue", "user");
+ PreferenceExperiments.startObserver("test", {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experimentvalue",
+ },
+ });
+
+ await PreferenceExperiments.stop("test");
+ ok(stopObserverStub.calledWith("test"), "stop removed an observer");
+ const experiment = await PreferenceExperiments.get("test");
+ is(experiment.expired, true, "stop marked the experiment as expired");
+ is(
+ Preferences.get("fake.preference"),
+ "oldvalue",
+ "stop reverted the preference to its previous value"
+ );
+ stopObserverStub.restore();
+ PreferenceExperiments.stopAllObservers();
+ }
+);
+
+// stop should remove a user-branch preference that had no value prior to the experiment
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ expired: false,
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experimentvalue",
+ preferenceType: "string",
+ previousPreferenceValue: null,
+ preferenceBranchType: "user",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stopObserver"),
+ async function ({ mockPreferences }) {
+ mockPreferences.set("fake.preference", "experimentvalue", "user");
+
+ await PreferenceExperiments.stop("test");
+ ok(
+ !Preferences.isSet("fake.preference"),
+ "stop removed the preference that had no value prior to the experiment"
+ );
+ }
+);
+
+// stop should not modify a preference if resetValue is false
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ expired: false,
+ branch: "fakebranch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experimentvalue",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ preferenceBranchType: "default",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stopObserver"),
+ withSendEventSpy(),
+ async function testStopReset({ mockPreferences, sendEventSpy }) {
+ mockPreferences.set("fake.preference", "customvalue", "default");
+
+ await PreferenceExperiments.stop("test", {
+ reason: "test-reason",
+ resetValue: false,
+ });
+ is(
+ DefaultPreferences.get("fake.preference"),
+ "customvalue",
+ "stop did not modify the preference"
+ );
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "preference_study",
+ "test",
+ {
+ didResetValue: "false",
+ reason: "test-reason",
+ branch: "fakebranch",
+ },
+ ],
+ ]);
+ }
+);
+
+// stop should include the caller that requested it in telemetry
+decorate_task(
+  withMockExperiments([preferenceStudyFactory({ expired: true })]),
+  withSendEventSpy(),
+  async function testStopReportsCaller({
+    prefExperiments: [experiment],
+    sendEventSpy,
+  }) {
+ await Assert.rejects(
+ PreferenceExperiments.stop(experiment.slug, {
+ caller: "testCaller",
+ reason: "original-reason",
+ }),
+ /.*already expired.*/,
+ "Stopped an expired experiment should throw an exception"
+ );
+
+ const expectedExtra = {
+ reason: "already-unenrolled",
+ enrollmentId: experiment.enrollmentId,
+ originalReason: "original-reason",
+ };
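+    // The caller is only included in the telemetry extra on Nightly builds.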
+ if (AppConstants.NIGHTLY_BUILD) {
+ expectedExtra.caller = "testCaller";
+ }
+
+ sendEventSpy.assertEvents([
+ ["unenrollFailed", "preference_study", experiment.slug, expectedExtra],
+ ]);
+ }
+);
+
+// get should throw if no experiment exists with the given name
+decorate_task(withMockExperiments(), async function () {
+ await Assert.rejects(
+ PreferenceExperiments.get("neverexisted"),
+ /could not find/i,
+ "get rejects if no experiment with the given name is found"
+ );
+});
+
+// get
+decorate_task(
+ withMockExperiments([preferenceStudyFactory({ slug: "test" })]),
+ async function ({ prefExperiments }) {
+ const experiment = await PreferenceExperiments.get("test");
+ is(experiment.slug, "test", "get fetches the correct experiment");
+
+ // Modifying the fetched experiment must not edit the data source.
+ experiment.slug = "othername";
+ const refetched = await PreferenceExperiments.get("test");
+ is(refetched.slug, "test", "get returns a copy of the experiment");
+ }
+);
+
+// get all
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({ slug: "experiment1", disabled: false }),
+ preferenceStudyFactory({ slug: "experiment2", disabled: true }),
+ ]),
+ async function testGetAll({ prefExperiments: [experiment1, experiment2] }) {
+ const fetchedExperiments = await PreferenceExperiments.getAll();
+ is(
+ fetchedExperiments.length,
+ 2,
+ "getAll returns a list of all stored experiments"
+ );
+ Assert.deepEqual(
+ fetchedExperiments.find(e => e.slug === "experiment1"),
+ experiment1,
+ "getAll returns a list with the correct experiments"
+ );
+ const fetchedExperiment2 = fetchedExperiments.find(
+ e => e.slug === "experiment2"
+ );
+ Assert.deepEqual(
+ fetchedExperiment2,
+ experiment2,
+ "getAll returns a list with the correct experiments, including disabled ones"
+ );
+
+ fetchedExperiment2.slug = "otherslug";
+ is(
+ experiment2.slug,
+ "experiment2",
+ "getAll returns copies of the experiments"
+ );
+ }
+);
+
+// get all active
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "active",
+ expired: false,
+ }),
+ preferenceStudyFactory({
+ slug: "inactive",
+ expired: true,
+ }),
+ ]),
+ withMockPreferences(),
+ async function testGetAllActive({
+ prefExperiments: [activeExperiment, inactiveExperiment],
+ }) {
+ let allActiveExperiments = await PreferenceExperiments.getAllActive();
+ Assert.deepEqual(
+ allActiveExperiments,
+ [activeExperiment],
+ "getAllActive only returns active experiments"
+ );
+
+ allActiveExperiments[0].slug = "newfakename";
+ allActiveExperiments = await PreferenceExperiments.getAllActive();
+ Assert.notEqual(
+      allActiveExperiments[0].slug,
+ "newfakename",
+ "getAllActive returns copies of stored experiments"
+ );
+ }
+);
+
+// has
+decorate_task(
+ withMockExperiments([preferenceStudyFactory({ slug: "test" })]),
+ async function () {
+ ok(
+ await PreferenceExperiments.has("test"),
+ "has returned true for a stored experiment"
+ );
+ ok(
+ !(await PreferenceExperiments.has("missing")),
+ "has returned false for a missing experiment"
+ );
+ }
+);
+
+// init should register telemetry experiments
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ branch: "branch",
+ preferences: {
+ "fake.pref": {
+ preferenceValue: "experiment value",
+ preferenceBranchType: "default",
+ preferenceType: "string",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(PreferenceExperiments, "startObserver"),
+ async function testInit({
+ prefExperiments,
+ mockPreferences,
+ setExperimentActiveStub,
+ }) {
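+    // Give the pref the experimental value before init() runs.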
+ mockPreferences.set("fake.pref", "experiment value");
+ await PreferenceExperiments.init();
+ ok(
+ setExperimentActiveStub.calledWith("test", "branch", {
+ type: "normandy-exp",
+ enrollmentId: prefExperiments[0].enrollmentId,
+ }),
+ "Experiment is registered by init"
+ );
+ }
+);
+
+// init should use the provided experiment type
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ branch: "branch",
+ preferences: {
+ "fake.pref": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ },
+ },
+ experimentType: "pref-test",
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(PreferenceExperiments, "startObserver"),
+ async function testInit({ mockPreferences, setExperimentActiveStub }) {
+ mockPreferences.set("fake.pref", "experiment value");
+ await PreferenceExperiments.init();
+ ok(
+ setExperimentActiveStub.calledWith("test", "branch", {
+ type: "normandy-pref-test",
+ enrollmentId: sinon.match(NormandyTestUtils.isUuid),
+ }),
+ "init should use the provided experiment type"
+ );
+ }
+);
+
+// starting and stopping experiments should register in telemetry
+decorate_task(
+ withMockExperiments(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ async function testStartAndStopTelemetry({
+ setExperimentActiveStub,
+ setExperimentInactiveStub,
+ sendEventSpy,
+ }) {
+ let { enrollmentId } = await PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ });
+
+ ok(
+ NormandyTestUtils.isUuid(enrollmentId),
+ "Experiment should have a UUID enrollmentId"
+ );
+
+ Assert.deepEqual(
+ setExperimentActiveStub.getCall(0).args,
+ ["test", "branch", { type: "normandy-exp", enrollmentId }],
+ "Experiment is registered by start()"
+ );
+ await PreferenceExperiments.stop("test", { reason: "test-reason" });
+ Assert.deepEqual(
+ setExperimentInactiveStub.args,
+ [["test"]],
+ "Experiment is unregistered by stop()"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_study",
+ "test",
+ {
+ experimentType: "exp",
+ branch: "branch",
+ enrollmentId,
+ },
+ ],
+ [
+ "unenroll",
+ "preference_study",
+ "test",
+ {
+ reason: "test-reason",
+ didResetValue: "true",
+ branch: "branch",
+ enrollmentId,
+ },
+ ],
+ ]);
+ }
+);
+
+// starting experiments should use the provided experiment type
+decorate_task(
+ withMockExperiments(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ async function testInitTelemetryExperimentType({
+ setExperimentActiveStub,
+ sendEventSpy,
+ }) {
+ const { enrollmentId } = await PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "pref-test",
+ });
+
+ Assert.deepEqual(
+ setExperimentActiveStub.getCall(0).args,
+ ["test", "branch", { type: "normandy-pref-test", enrollmentId }],
+ "start() should register the experiment with the provided type"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_study",
+ "test",
+ {
+ experimentType: "pref-test",
+ branch: "branch",
+ enrollmentId,
+ },
+ ],
+ ]);
+
+ // start sets the passed preference in a way that is hard to mock.
+ // Reset the preference so it doesn't interfere with other tests.
+ Services.prefs.getDefaultBranch("fake.preference").deleteBranch("");
+ }
+);
+
+// When a default-branch experiment starts and some of its preferences already
+// have user-set values, enrollment should immediately send expPrefChanged
+// telemetry for the overridden default-branch preferences.
+decorate_task(
+ withMockExperiments(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ withMockPreferences(),
+ async function testOverriddenAtEnroll({ sendEventSpy, mockPreferences }) {
+ // consts for preference names to avoid typos
+ const prefNames = {
+ defaultNoOverride: "fake.preference.default-no-override",
+ defaultWithOverride: "fake.preference.default-with-override",
+ userNoOverride: "fake.preference.user-no-override",
+ userWithOverride: "fake.preference.user-with-override",
+ };
+
+ // Set up preferences for the test. Two preferences with only default
+ // values, and two preferences with both default and user values.
+ mockPreferences.set(
+ prefNames.defaultNoOverride,
+ "default value",
+ "default"
+ );
+ mockPreferences.set(
+ prefNames.defaultWithOverride,
+ "default value",
+ "default"
+ );
+ mockPreferences.set(prefNames.defaultWithOverride, "user value", "user");
+ mockPreferences.set(prefNames.userNoOverride, "default value", "default");
+ mockPreferences.set(prefNames.userWithOverride, "default value", "default");
+ mockPreferences.set(prefNames.userWithOverride, "user value", "user");
+
+    // Start the experiment with two default-branch and two user-branch
+    // preferences; one of each is already overridden by a user value.
+ const { enrollmentId, slug } = await PreferenceExperiments.start({
+ slug: "test-experiment",
+ actionName: "someAction",
+ branch: "experimental-branch",
+ preferences: {
+ [prefNames.defaultNoOverride]: {
+ preferenceValue: "experimental value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ [prefNames.defaultWithOverride]: {
+ preferenceValue: "experimental value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ [prefNames.userNoOverride]: {
+ preferenceValue: "experimental value",
+ preferenceType: "string",
+ preferenceBranchType: "user",
+ },
+ [prefNames.userWithOverride]: {
+ preferenceValue: "experimental value",
+ preferenceType: "string",
+ preferenceBranchType: "user",
+ },
+ },
+ experimentType: "pref-test",
+ });
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_study",
+ slug,
+ {
+ experimentType: "pref-test",
+ branch: "experimental-branch",
+ enrollmentId,
+ },
+ ],
+ [
+ "expPrefChanged",
+ "preference_study",
+ slug,
+ {
+ preferenceName: prefNames.defaultWithOverride,
+ reason: "onEnroll",
+ enrollmentId,
+ },
+ ],
+ ]);
+ }
+);
+
+// Experiments shouldn't be recorded by init() in telemetry if they are expired
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "expired",
+ branch: "branch",
+ expired: true,
+ }),
+ ]),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ async function testInitTelemetryExpired({ setExperimentActiveStub }) {
+ await PreferenceExperiments.init();
+ ok(
+ !setExperimentActiveStub.called,
+ "Expired experiment is not registered by init"
+ );
+ }
+);
+
+// Experiments should record if the preference has been changed when init() is
+// called and no previous override had been observed.
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ preferences: {
+ "fake.preference.1": {
+ preferenceValue: "experiment value 1",
+ preferenceType: "string",
+ overridden: false,
+ },
+ "fake.preference.2": {
+ preferenceValue: "experiment value 2",
+ preferenceType: "string",
+ overridden: true,
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "recordPrefChange"),
+ async function testInitChanges({
+ mockPreferences,
+ recordPrefChangeStub,
+ prefExperiments: [experiment],
+ }) {
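+    // Both prefs get user overrides, but only fake.preference.1 is not yet
+    // marked as overridden in the stored experiment.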
+ mockPreferences.set("fake.preference.1", "experiment value 1", "default");
+ mockPreferences.set("fake.preference.1", "changed value 1", "user");
+ mockPreferences.set("fake.preference.2", "experiment value 2", "default");
+ mockPreferences.set("fake.preference.2", "changed value 2", "user");
+ await PreferenceExperiments.init();
+
+ is(
+ Preferences.get("fake.preference.1"),
+ "changed value 1",
+ "Preference value was not changed"
+ );
+ is(
+ Preferences.get("fake.preference.2"),
+ "changed value 2",
+ "Preference value was not changed"
+ );
+
+ Assert.deepEqual(
+ recordPrefChangeStub.args,
+ [
+ [
+ {
+ experiment,
+ preferenceName: "fake.preference.1",
+ reason: "sideload",
+ },
+ ],
+ ],
+ "Only one experiment preference change should be recorded"
+ );
+ }
+);
+
+// init should register an observer for experiments
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ previousPreferenceValue: "oldfakevalue",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "startObserver"),
+ withStub(PreferenceExperiments, "stop"),
+ withStub(CleanupManager, "addCleanupHandler"),
+ async function testInitRegistersObserver({
+ mockPreferences,
+ startObserverStub,
+ stopStub,
+ }) {
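+    // stop() is stubbed to throw so that any attempt by init() to stop the
+    // experiment is surfaced as a test failure.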
+ stopStub.throws("Stop should not be called");
+ mockPreferences.set("fake.preference", "experiment value", "default");
+ is(
+ Preferences.get("fake.preference"),
+ "experiment value",
+ "pref shouldn't have a user value"
+ );
+ await PreferenceExperiments.init();
+
+ ok(startObserverStub.calledOnce, "init should register an observer");
+ Assert.deepEqual(
+ startObserverStub.getCall(0).args,
+ [
+ "test",
+ {
+ "fake.preference": {
+ preferenceType: "string",
+ preferenceValue: "experiment value",
+ previousPreferenceValue: "oldfakevalue",
+ preferenceBranchType: "default",
+ overridden: false,
+ },
+ },
+ ],
+ "init should register an observer with the right args"
+ );
+ }
+);
+
+// saveStartupPrefs
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "char",
+ preferences: {
+ "fake.char": {
+ preferenceValue: "string",
+ preferenceType: "string",
+ },
+ },
+ }),
+ preferenceStudyFactory({
+ slug: "int",
+ preferences: {
+ "fake.int": {
+ preferenceValue: 2,
+ preferenceType: "int",
+ },
+ },
+ }),
+ preferenceStudyFactory({
+ slug: "bool",
+ preferences: {
+ "fake.bool": {
+ preferenceValue: true,
+ preferenceType: "boolean",
+ },
+ },
+ }),
+ ]),
+ async function testSaveStartupPrefs() {
+ Services.prefs.deleteBranch(startupPrefs);
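+    // Seed a stale startup pref so we can verify saveStartupPrefs clears it.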
+ Services.prefs.setBoolPref(`${startupPrefs}.fake.old`, true);
+ await PreferenceExperiments.saveStartupPrefs();
+
+ ok(
+ Services.prefs.getBoolPref(`${startupPrefs}.fake.bool`),
+ "The startup value for fake.bool was saved."
+ );
+ is(
+ Services.prefs.getCharPref(`${startupPrefs}.fake.char`),
+ "string",
+ "The startup value for fake.char was saved."
+ );
+ is(
+ Services.prefs.getIntPref(`${startupPrefs}.fake.int`),
+ 2,
+ "The startup value for fake.int was saved."
+ );
+ ok(
+ !Services.prefs.prefHasUserValue(`${startupPrefs}.fake.old`),
+ "saveStartupPrefs deleted old startup pref values."
+ );
+ }
+);
+
+// saveStartupPrefs errors for invalid pref type
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ preferences: {
+ "fake.invalidValue": {
+ preferenceValue: new Date(),
+ },
+ },
+ }),
+ ]),
+ async function testSaveStartupPrefsError() {
+ await Assert.rejects(
+ PreferenceExperiments.saveStartupPrefs(),
+ /invalid preference type/i,
+ "saveStartupPrefs throws if an experiment has an invalid preference value type"
+ );
+ }
+);
+
+// saveStartupPrefs should not store values for user-branch recipes
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "defaultBranchRecipe",
+ preferences: {
+ "fake.default": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ }),
+ preferenceStudyFactory({
+ slug: "userBranchRecipe",
+ preferences: {
+ "fake.user": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ preferenceBranchType: "user",
+ },
+ },
+ }),
+ ]),
+ async function testSaveStartupPrefsUserBranch() {
+ Assert.deepEqual(
+ Services.prefs.getChildList(startupPrefs),
+ [],
+ "As a prerequisite no startup prefs are set"
+ );
+
+ await PreferenceExperiments.saveStartupPrefs();
+
+ Assert.deepEqual(
+ Services.prefs.getChildList(startupPrefs),
+ [`${startupPrefs}.fake.default`],
+ "only the expected prefs are set"
+ );
+ is(
+ Services.prefs.getCharPref(
+ `${startupPrefs}.fake.default`,
+ "fallback value"
+ ),
+ "experiment value",
+ "The startup value for fake.default was set"
+ );
+ is(
+ Services.prefs.getPrefType(`${startupPrefs}.fake.user`),
+ Services.prefs.PREF_INVALID,
+ "The startup value for fake.user was not set"
+ );
+
+ Services.prefs.deleteBranch(startupPrefs);
+ }
+);
+
+// test that default branch prefs restore to the right value if the default pref changes
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "startObserver"),
+ withStub(PreferenceExperiments, "stopObserver"),
+ async function testDefaultBranchStop({ mockPreferences }) {
+ const prefName = "fake.preference";
+ mockPreferences.set(prefName, "old version's value", "default");
+
+ // start an experiment
+ await PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ [prefName]: {
+ preferenceValue: "experiment value",
+ preferenceBranchType: "default",
+ preferenceType: "string",
+ },
+ },
+ });
+
+ is(
+ Services.prefs.getCharPref(prefName),
+ "experiment value",
+ "Starting an experiment should change the pref"
+ );
+
+ // Now pretend that firefox has updated and restarted to a version
+ // where the built-default value of fake.preference is something
+ // else. Bootstrap has run and changed the pref to the
+ // experimental value, and produced the call to
+ // recordOriginalValues below.
+ PreferenceExperiments.recordOriginalValues({
+ [prefName]: "new version's value",
+ });
+ is(
+ Services.prefs.getCharPref(prefName),
+ "experiment value",
+ "Recording original values shouldn't affect the preference."
+ );
+
+ // Now stop the experiment. It should revert to the new version's default, not the old.
+ await PreferenceExperiments.stop("test");
+ is(
+ Services.prefs.getCharPref(prefName),
+ "new version's value",
+ "Preference should revert to new default"
+ );
+ }
+);
+
+// test that default branch prefs restore to the right value if the preference is removed
+decorate_task(
+ withMockExperiments(),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "startObserver"),
+ withStub(PreferenceExperiments, "stopObserver"),
+  async function testDefaultBranchStopPrefRemoved({ mockPreferences }) {
+ const prefName = "fake.preference";
+ mockPreferences.set(prefName, "old version's value", "default");
+
+ // start an experiment
+ await PreferenceExperiments.start({
+ slug: "test",
+ actionName: "SomeAction",
+ branch: "branch",
+ preferences: {
+ [prefName]: {
+ preferenceValue: "experiment value",
+ preferenceBranchType: "default",
+ preferenceType: "string",
+ },
+ },
+ });
+
+ is(
+ Services.prefs.getCharPref(prefName),
+ "experiment value",
+ "Starting an experiment should change the pref"
+ );
+
+ // Now pretend that firefox has updated and restarted to a version
+ // where fake.preference has been removed in the default pref set.
+ // Bootstrap has run and changed the pref to the experimental
+ // value, and produced the call to recordOriginalValues below.
+ PreferenceExperiments.recordOriginalValues({ [prefName]: null });
+ is(
+ Services.prefs.getCharPref(prefName),
+ "experiment value",
+ "Recording original values shouldn't affect the preference."
+ );
+
+ // Now stop the experiment. It should remove the preference
+ await PreferenceExperiments.stop("test");
+ is(
+ Services.prefs.getCharPref(prefName, "DEFAULT"),
+ "DEFAULT",
+ "Preference should be absent"
+ );
+ }
+).skip(/* bug 1502410 and bug 1505941 */);
+
+// stop should pass "unknown" to telemetry event for `reason` if none is specified
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stopObserver"),
+ withSendEventSpy(),
+ async function testStopUnknownReason({ mockPreferences, sendEventSpy }) {
+ mockPreferences.set("fake.preference", "default value", "default");
+ await PreferenceExperiments.stop("test");
+ is(
+ sendEventSpy.getCall(0).args[3].reason,
+ "unknown",
+ "PreferenceExperiments.stop() should use unknown as the default reason"
+ );
+ }
+);
+
+// stop should pass along the value for resetValue to Telemetry Events as didResetValue
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ slug: "test1",
+ preferences: {
+ "fake.preference1": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+        previousPreferenceValue: "previous",
+ },
+ },
+ }),
+ preferenceStudyFactory({
+ slug: "test2",
+ preferences: {
+ "fake.preference2": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+        previousPreferenceValue: "previous",
+ },
+ },
+ }),
+ ]),
+ withMockPreferences(),
+ withStub(PreferenceExperiments, "stopObserver"),
+ withSendEventSpy(),
+ async function testStopResetValue({ mockPreferences, sendEventSpy }) {
+ mockPreferences.set("fake.preference1", "default value", "default");
+ await PreferenceExperiments.stop("test1", { resetValue: true });
+ is(sendEventSpy.callCount, 1);
+ is(
+ sendEventSpy.getCall(0).args[3].didResetValue,
+ "true",
+ "PreferenceExperiments.stop() should pass true values of resetValue as didResetValue"
+ );
+
+ mockPreferences.set("fake.preference2", "default value", "default");
+ await PreferenceExperiments.stop("test2", { resetValue: false });
+ is(sendEventSpy.callCount, 2);
+ is(
+ sendEventSpy.getCall(1).args[3].didResetValue,
+ "false",
+ "PreferenceExperiments.stop() should pass false values of resetValue as didResetValue"
+ );
+ }
+);
+
+// `recordPrefChange` should send the right telemetry and mark the pref as
+// overridden when passed an experiment
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ preferences: {
+ "test.pref": {},
+ },
+ }),
+ ]),
+ withSendEventSpy(),
+ async function testRecordPrefChangeWorks({
+ sendEventSpy,
+ prefExperiments: [experiment],
+ }) {
+ is(
+ experiment.preferences["test.pref"].overridden,
+ false,
+ "Precondition: the pref should not be overridden yet"
+ );
+
+ await PreferenceExperiments.recordPrefChange({
+ experiment,
+ preferenceName: "test.pref",
+ reason: "test-run",
+ });
+
+ experiment = await PreferenceExperiments.get(experiment.slug);
+ is(
+ experiment.preferences["test.pref"].overridden,
+ true,
+ "The pref should be marked as overridden"
+ );
+ sendEventSpy.assertEvents([
+ [
+ "expPrefChanged",
+ "preference_study",
+ experiment.slug,
+ {
+ preferenceName: "test.pref",
+ reason: "test-run",
+ enrollmentId: experiment.enrollmentId,
+ },
+ ],
+ ]);
+ }
+);
+
+// `recordPrefChange` should send the right telemetry and mark the pref as
+// overridden when passed a slug
+decorate_task(
+ withMockExperiments([
+ preferenceStudyFactory({
+ preferences: {
+ "test.pref": {},
+ },
+ }),
+ ]),
+ withSendEventSpy(),
+  async function testRecordPrefChangeWorksWithSlug({
+ sendEventSpy,
+ prefExperiments: [experiment],
+ }) {
+ is(
+ experiment.preferences["test.pref"].overridden,
+ false,
+ "Precondition: the pref should not be overridden yet"
+ );
+
+ await PreferenceExperiments.recordPrefChange({
+ experimentSlug: experiment.slug,
+ preferenceName: "test.pref",
+ reason: "test-run",
+ });
+
+ experiment = await PreferenceExperiments.get(experiment.slug);
+ is(
+ experiment.preferences["test.pref"].overridden,
+ true,
+ "The pref should be marked as overridden"
+ );
+ sendEventSpy.assertEvents([
+ [
+ "expPrefChanged",
+ "preference_study",
+ experiment.slug,
+ {
+ preferenceName: "test.pref",
+ reason: "test-run",
+ enrollmentId: experiment.enrollmentId,
+ },
+ ],
+ ]);
+ }
+);
+
+// When a default-branch experiment starts, prefs that already have user values
+// should not be changed.
+decorate_task(
+ withMockExperiments(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ withMockPreferences(),
+ async function testOverriddenAtEnrollNoChange({ mockPreferences }) {
+ // Set up a situation where the user has changed the value of the pref away
+ // from the default. Then run a default experiment that changes the
+ // preference to the same value.
+ mockPreferences.set("test.pref", "old value", "default");
+ mockPreferences.set("test.pref", "new value", "user");
+
+ await PreferenceExperiments.start({
+ slug: "test-experiment",
+ actionName: "someAction",
+ branch: "experimental-branch",
+ preferences: {
+ "test.pref": {
+ preferenceValue: "new value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "pref-test",
+ });
+
+ is(
+ Services.prefs.getCharPref("test.pref"),
+ "new value",
+ "User value should be preserved"
+ );
+ is(
+ Services.prefs.getDefaultBranch("").getCharPref("test.pref"),
+ "old value",
+ "Default value should not have changed"
+ );
+
+ const experiment = await PreferenceExperiments.get("test-experiment");
+ ok(
+ experiment.preferences["test.pref"].overridden,
+ "Pref should be marked as overridden"
+ );
+ }
+);
+
+// When a default-branch experiment starts, prefs that already have a built-in
+// default and a user-set value should not be changed.
+// Bug 1735344:
+// eslint-disable-next-line mozilla/reject-addtask-only
+decorate_task(
+ withMockExperiments(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ withMockPreferences(),
+  async function testOverriddenAtEnrollExistingDefault({ mockPreferences }) {
+    // Set up a situation where the user has changed the value of a pref that
+    // has a built-in default, then run a default-branch experiment that tries
+    // to set a different value. The user value should be preserved.
+
+ // An arbitrary string preference that won't interact with Normandy.
+ let pref = "extensions.recommendations.privacyPolicyUrl";
+ let defaultValue = Services.prefs.getCharPref(pref);
+
+ mockPreferences.set(pref, "user-set-value", "user");
+
+ await PreferenceExperiments.start({
+ slug: "test-experiment",
+ actionName: "someAction",
+ branch: "experimental-branch",
+ preferences: {
+ [pref]: {
+ preferenceValue: "experiment-value",
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ },
+ },
+ experimentType: "pref-test",
+ });
+
+ is(
+ Services.prefs.getCharPref(pref),
+ "user-set-value",
+ "User value should be preserved"
+ );
+ is(
+ Services.prefs.getDefaultBranch("").getCharPref(pref),
+ defaultValue,
+ "Default value should not have changed"
+ );
+
+ const experiment = await PreferenceExperiments.get("test-experiment");
+ ok(
+ experiment.preferences[pref].overridden,
+ "Pref should be marked as overridden"
+ );
+ }
+).only();
diff --git a/toolkit/components/normandy/test/browser/browser_PreferenceRollouts.js b/toolkit/components/normandy/test/browser/browser_PreferenceRollouts.js
new file mode 100644
index 0000000000..43536418ab
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_PreferenceRollouts.js
@@ -0,0 +1,316 @@
+"use strict";
+
+const { IndexedDB } = ChromeUtils.importESModule(
+ "resource://gre/modules/IndexedDB.sys.mjs"
+);
+
+const { PreferenceRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceRollouts.sys.mjs"
+);
+const {
+ NormandyTestUtils: {
+ factories: { preferenceRolloutFactory },
+ },
+} = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function testGetMissing() {
+ ok(
+ !(await PreferenceRollouts.get("does-not-exist")),
+ "get should return null when the requested rollout does not exist"
+ );
+ }
+);
+
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function testAddUpdateAndGet() {
+ const rollout = {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [],
+ enrollmentId: "test-enrollment-id",
+ };
+ await PreferenceRollouts.add(rollout);
+ let storedRollout = await PreferenceRollouts.get(rollout.slug);
+ Assert.deepEqual(
+ rollout,
+ storedRollout,
+ "get should retrieve a rollout from storage."
+ );
+
+ rollout.state = PreferenceRollouts.STATE_GRADUATED;
+ await PreferenceRollouts.update(rollout);
+ storedRollout = await PreferenceRollouts.get(rollout.slug);
+ Assert.deepEqual(
+ rollout,
+ storedRollout,
+ "get should retrieve a rollout from storage."
+ );
+ }
+);
+
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function testCantUpdateNonexistent() {
+ const rollout = {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [],
+ };
+ await Assert.rejects(
+ PreferenceRollouts.update(rollout),
+ /doesn't already exist/,
+ "Update should fail if the rollout doesn't exist"
+ );
+ ok(
+ !(await PreferenceRollouts.has("test-rollout")),
+ "rollout should not have been added"
+ );
+ }
+);
+
+decorate_task(PreferenceRollouts.withTestMock(), async function testGetAll() {
+ const rollout1 = {
+ slug: "test-rollout-1",
+    preferences: [],
+ enrollmentId: "test-enrollment-id-1",
+ };
+ const rollout2 = {
+ slug: "test-rollout-2",
+    preferences: [],
+ enrollmentId: "test-enrollment-id-2",
+ };
+ await PreferenceRollouts.add(rollout1);
+ await PreferenceRollouts.add(rollout2);
+
+ const storedRollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+    storedRollouts.sort((a, b) => a.slug.localeCompare(b.slug)),
+ [rollout1, rollout2],
+ "getAll should return every stored rollout."
+ );
+});
+
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function testGetAllActive() {
+ const rollout1 = {
+ slug: "test-rollout-1",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ enrollmentId: "test-enrollment-1",
+ };
+ const rollout2 = {
+ slug: "test-rollout-2",
+ state: PreferenceRollouts.STATE_GRADUATED,
+ enrollmentId: "test-enrollment-2",
+ };
+ const rollout3 = {
+ slug: "test-rollout-3",
+ state: PreferenceRollouts.STATE_ROLLED_BACK,
+ enrollmentId: "test-enrollment-3",
+ };
+ await PreferenceRollouts.add(rollout1);
+ await PreferenceRollouts.add(rollout2);
+ await PreferenceRollouts.add(rollout3);
+
+ const activeRollouts = await PreferenceRollouts.getAllActive();
+ Assert.deepEqual(
+ activeRollouts,
+ [rollout1],
+ "getAllActive should return only active rollouts"
+ );
+ }
+);
+
+decorate_task(PreferenceRollouts.withTestMock(), async function testHas() {
+ const rollout = {
+ slug: "test-rollout",
+ preferences: [],
+ enrollmentId: "test-enrollment",
+ };
+ await PreferenceRollouts.add(rollout);
+ ok(
+ await PreferenceRollouts.has(rollout.slug),
+ "has should return true for an existing rollout"
+ );
+ ok(
+ !(await PreferenceRollouts.has("does not exist")),
+ "has should return false for a missing rollout"
+ );
+});
+
+// recordOriginalValues should update storage to note the original values
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function testRecordOriginalValuesUpdatesPreviousValues() {
+ await PreferenceRollouts.add({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref", value: 2, previousValue: null },
+ ],
+ enrollmentId: "test-enrollment",
+ });
+
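+    // Report the value the pref had before the rollout changed it.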
+ await PreferenceRollouts.recordOriginalValues({ "test.pref": 1 });
+
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref", value: 2, previousValue: 1 },
+ ],
+ enrollmentId: "test-enrollment",
+ },
+ ],
+ "rollout in database should be updated"
+ );
+ }
+);
+
+// recordOriginalValues should graduate a rollout when all of its preference
+// values match the built-in defaults
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function testRecordOriginalValuesGraduates({ sendEventSpy }) {
+ await PreferenceRollouts.add({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref1", value: 2, previousValue: null },
+ { preferenceName: "test.pref2", value: 2, previousValue: null },
+ ],
+ enrollmentId: "test-enrollment-id",
+ });
+
+ // one pref being the same isn't enough to graduate
+ await PreferenceRollouts.recordOriginalValues({
+ "test.pref1": 1,
+ "test.pref2": 2,
+ });
+ let rollout = await PreferenceRollouts.get("test-rollout");
+ is(
+ rollout.state,
+ PreferenceRollouts.STATE_ACTIVE,
+ "rollouts should remain active when only one pref matches the built-in default"
+ );
+
+ sendEventSpy.assertEvents([]);
+
+ // both prefs is enough
+ await PreferenceRollouts.recordOriginalValues({
+ "test.pref1": 2,
+ "test.pref2": 2,
+ });
+ rollout = await PreferenceRollouts.get("test-rollout");
+ is(
+ rollout.state,
+ PreferenceRollouts.STATE_GRADUATED,
+ "rollouts should graduate when all prefs matches the built-in defaults"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "graduate",
+ "preference_rollout",
+ "test-rollout",
+ { enrollmentId: "test-enrollment-id" },
+ ],
+ ]);
+ }
+);
+
+// init should mark active rollouts in telemetry
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ PreferenceRollouts.withTestMock(),
+ async function testInitTelemetry({ setExperimentActiveStub }) {
+ await PreferenceRollouts.add({
+ slug: "test-rollout-active-1",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ enrollmentId: "test-enrollment-1",
+ });
+ await PreferenceRollouts.add({
+ slug: "test-rollout-active-2",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ enrollmentId: "test-enrollment-2",
+ });
+ await PreferenceRollouts.add({
+ slug: "test-rollout-rolled-back",
+ state: PreferenceRollouts.STATE_ROLLED_BACK,
+ enrollmentId: "test-enrollment-3",
+ });
+ await PreferenceRollouts.add({
+ slug: "test-rollout-graduated",
+ state: PreferenceRollouts.STATE_GRADUATED,
+ enrollmentId: "test-enrollment-4",
+ });
+
+ await PreferenceRollouts.init();
+
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [
+ [
+ "test-rollout-active-1",
+ "active",
+ { type: "normandy-prefrollout", enrollmentId: "test-enrollment-1" },
+ ],
+ [
+ "test-rollout-active-2",
+ "active",
+ { type: "normandy-prefrollout", enrollmentId: "test-enrollment-2" },
+ ],
+ ],
+ "init should set activate a telemetry experiment for active preferences"
+ );
+ }
+);
+
+// init should graduate rollouts in the graduation set
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock({
+ graduationSet: new Set(["test-rollout"]),
+ rollouts: [
+ preferenceRolloutFactory({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ enrollmentId: "test-enrollment-id",
+ }),
+ ],
+ }),
+ async function testInitGraduationSet({
+ setExperimentActiveStub,
+ sendEventSpy,
+ }) {
+ await PreferenceRollouts.init();
+ const newRollout = await PreferenceRollouts.get("test-rollout");
+ Assert.equal(
+ newRollout.state,
+ PreferenceRollouts.STATE_GRADUATED,
+ "the rollout should be graduated"
+ );
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [],
+ "setExperimentActive should not be called"
+ );
+ sendEventSpy.assertEvents([
+ [
+ "graduate",
+ "preference_rollout",
+ "test-rollout",
+ { enrollmentId: "test-enrollment-id", reason: "in-graduation-set" },
+ ],
+ ]);
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_RecipeRunner.js b/toolkit/components/normandy/test/browser/browser_RecipeRunner.js
new file mode 100644
index 0000000000..d5b37b5c67
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_RecipeRunner.js
@@ -0,0 +1,874 @@
+"use strict";
+
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+const { FilterExpressions } = ChromeUtils.importESModule(
+ "resource://gre/modules/components-utils/FilterExpressions.sys.mjs"
+);
+
+const { Normandy } = ChromeUtils.importESModule(
+ "resource://normandy/Normandy.sys.mjs"
+);
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+const { ClientEnvironment } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ClientEnvironment.sys.mjs"
+);
+const { CleanupManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/CleanupManager.sys.mjs"
+);
+const { ActionsManager } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ActionsManager.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+
+const { RemoteSettings } = ChromeUtils.importESModule(
+ "resource://services-settings/remote-settings.sys.mjs"
+);
+
+add_task(async function getFilterContext() {
+ const recipe = { id: 17, arguments: { foo: "bar" }, unrelated: false };
+ const context = RecipeRunner.getFilterContext(recipe);
+
+ // Test for expected properties in the filter expression context.
+ const expectedNormandyKeys = [
+ "channel",
+ "country",
+ "distribution",
+ "doNotTrack",
+ "isDefaultBrowser",
+ "locale",
+ "plugins",
+ "recipe",
+ "request_time",
+ "searchEngine",
+ "syncDesktopDevices",
+ "syncMobileDevices",
+ "syncSetup",
+ "syncTotalDevices",
+ "telemetry",
+ "userId",
+ "version",
+ ];
+ for (const key of expectedNormandyKeys) {
+ ok(key in context.env, `env.${key} is available`);
+ ok(key in context.normandy, `normandy.${key} is available`);
+ }
+ Assert.deepEqual(
+ context.normandy,
+ context.env,
+ "context offers normandy as backwards-compatible alias for context.environment"
+ );
+
+ is(
+ context.env.recipe.id,
+ recipe.id,
+ "environment.recipe is the recipe passed to getFilterContext"
+ );
+ is(
+ ClientEnvironment.recipe,
+ undefined,
+ "ClientEnvironment has not been mutated"
+ );
+ delete recipe.unrelated;
+ Assert.deepEqual(
+ context.env.recipe,
+ recipe,
+ "environment.recipe drops unrecognized attributes from the recipe"
+ );
+
+ // Filter context attributes are cached.
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.user_id", "some id"]],
+ });
+ is(context.env.userId, "some id", "User id is read from prefs when accessed");
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.user_id", "real id"]],
+ });
+ is(context.env.userId, "some id", "userId was cached");
+});
+
+add_task(
+ withStub(NormandyApi, "verifyObjectSignature"),
+ async function test_getRecipeSuitability_filterExpressions() {
+ const check = filter =>
+ RecipeRunner.getRecipeSuitability({ filter_expression: filter });
+
+    // Errors must result in a FILTER_ERROR suitability.
+    is(
+      await check("invalid ( + 5yntax"),
+      BaseAction.suitability.FILTER_ERROR,
+      "Invalid filter expressions result in FILTER_ERROR"
+    );
+
+    // Non-boolean truthy filter results count as a match.
+    is(
+      await check("[1, 2, 3]"),
+      BaseAction.suitability.FILTER_MATCH,
+      "Non-boolean filter expressions result in FILTER_MATCH"
+    );
+
+ // The given recipe must be available to the filter context.
+ const recipe = { filter_expression: "normandy.recipe.id == 7", id: 7 };
+ is(
+ await RecipeRunner.getRecipeSuitability(recipe),
+ BaseAction.suitability.FILTER_MATCH,
+ "The recipe is available in the filter context"
+ );
+ recipe.id = 4;
+ is(
+ await RecipeRunner.getRecipeSuitability(recipe),
+ BaseAction.suitability.FILTER_MISMATCH,
+ "The recipe is available in the filter context"
+ );
+ }
+);
+
+decorate_task(
+ withStub(FilterExpressions, "eval"),
+ withStub(Uptake, "reportRecipe"),
+ withStub(NormandyApi, "verifyObjectSignature"),
+ async function test_getRecipeSuitability_canHandleExceptions({
+ evalStub,
+ reportRecipeStub,
+ }) {
+ evalStub.throws("this filter was broken somehow");
+ const someRecipe = {
+ id: "1",
+ action: "action",
+ filter_expression: "broken",
+ };
+ const result = await RecipeRunner.getRecipeSuitability(someRecipe);
+
+ is(
+ result,
+ BaseAction.suitability.FILTER_ERROR,
+ "broken filters are reported"
+ );
+ Assert.deepEqual(reportRecipeStub.args, [
+ [someRecipe, Uptake.RECIPE_FILTER_BROKEN],
+ ]);
+ }
+);
+
+decorate_task(
+ withSpy(FilterExpressions, "eval"),
+ withStub(RecipeRunner, "getCapabilities"),
+ withStub(NormandyApi, "verifyObjectSignature"),
+ async function test_getRecipeSuitability_checksCapabilities({
+ evalSpy,
+ getCapabilitiesStub,
+ }) {
+ getCapabilitiesStub.returns(new Set(["test-capability"]));
+
+ is(
+ await RecipeRunner.getRecipeSuitability({
+ filter_expression: "true",
+ }),
+ BaseAction.suitability.FILTER_MATCH,
+ "Recipes with no capabilities should pass"
+ );
+ ok(evalSpy.called, "Filter should be evaluated");
+
+ evalSpy.resetHistory();
+ is(
+ await RecipeRunner.getRecipeSuitability({
+ capabilities: [],
+ filter_expression: "true",
+ }),
+ BaseAction.suitability.FILTER_MATCH,
+ "Recipes with empty capabilities should pass"
+ );
+ ok(evalSpy.called, "Filter should be evaluated");
+
+ evalSpy.resetHistory();
+ is(
+ await RecipeRunner.getRecipeSuitability({
+ capabilities: ["test-capability"],
+ filter_expression: "true",
+ }),
+ BaseAction.suitability.FILTER_MATCH,
+ "Recipes with a matching capability should pass"
+ );
+ ok(evalSpy.called, "Filter should be evaluated");
+
+ evalSpy.resetHistory();
+ is(
+ await RecipeRunner.getRecipeSuitability({
+ capabilities: ["impossible-capability"],
+ filter_expression: "true",
+ }),
+ BaseAction.suitability.CAPABILITIES_MISMATCH,
+ "Recipes with non-matching capabilities should not pass"
+ );
+ ok(!evalSpy.called, "Filter should not be evaluated");
+ }
+);
+
+decorate_task(
+ withMockNormandyApi(),
+ withStub(ClientEnvironment, "getClientClassification"),
+ async function testClientClassificationCache({
+ mockNormandyApi,
+ getClientClassificationStub,
+ }) {
+ getClientClassificationStub.returns(Promise.resolve(false));
+
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.api_url", "https://example.com/selfsupport-dummy"]],
+ });
+
+ // When the experiment pref is false, eagerly call getClientClassification.
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.experiments.lazy_classify", false]],
+ });
+ ok(
+ !getClientClassificationStub.called,
+ "getClientClassification hasn't been called"
+ );
+ await RecipeRunner.run();
+ ok(
+ getClientClassificationStub.called,
+ "getClientClassification was called eagerly"
+ );
+
+ // When the experiment pref is true, do not eagerly call getClientClassification.
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.experiments.lazy_classify", true]],
+ });
+ getClientClassificationStub.reset();
+ ok(
+ !getClientClassificationStub.called,
+ "getClientClassification hasn't been called"
+ );
+ await RecipeRunner.run();
+ ok(
+ !getClientClassificationStub.called,
+ "getClientClassification was not called eagerly"
+ );
+ }
+);
+
+decorate_task(
+ withStub(Uptake, "reportRunner"),
+ withStub(ActionsManager.prototype, "finalize"),
+ NormandyTestUtils.withMockRecipeCollection([]),
+ async function testRunEvents() {
+ const startPromise = TestUtils.topicObserved("recipe-runner:start");
+ const endPromise = TestUtils.topicObserved("recipe-runner:end");
+
+ await RecipeRunner.run();
+
+ // Will timeout if notifications were not received.
+ await startPromise;
+ await endPromise;
+ ok(true, "The test should pass without timing out");
+ }
+);
+
+decorate_task(
+ withStub(RecipeRunner, "getCapabilities"),
+ withStub(NormandyApi, "verifyObjectSignature"),
+ NormandyTestUtils.withMockRecipeCollection([{ id: 1 }]),
+ async function test_run_includesCapabilities({ getCapabilitiesStub }) {
+ getCapabilitiesStub.returns(new Set(["test-capability"]));
+ await RecipeRunner.run();
+ ok(getCapabilitiesStub.called, "getCapabilities should be called");
+ }
+);
+
+decorate_task(
+ withStub(NormandyApi, "verifyObjectSignature"),
+ withStub(ActionsManager.prototype, "processRecipe"),
+ withStub(ActionsManager.prototype, "finalize"),
+ withStub(Uptake, "reportRecipe"),
+ async function testReadFromRemoteSettings({
+ verifyObjectSignatureStub,
+ processRecipeStub,
+ finalizeStub,
+ reportRecipeStub,
+ }) {
+ const matchRecipe = {
+ id: 1,
+ name: "match",
+ action: "matchAction",
+ filter_expression: "true",
+ };
+ const noMatchRecipe = {
+ id: 2,
+ name: "noMatch",
+ action: "noMatchAction",
+ filter_expression: "false",
+ };
+ const missingRecipe = {
+ id: 3,
+ name: "missing",
+ action: "missingAction",
+ filter_expression: "true",
+ };
+
+ const db = await RecipeRunner._remoteSettingsClientForTesting.db;
+ const fakeSig = { signature: "abc" };
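+    // Signature verification is stubbed out above, so a placeholder signature is enough.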
+ await db.importChanges({}, Date.now(), [
+ { id: "match", recipe: matchRecipe, signature: fakeSig },
+ {
+ id: "noMatch",
+ recipe: noMatchRecipe,
+ signature: fakeSig,
+ },
+ {
+ id: "missing",
+ recipe: missingRecipe,
+ signature: fakeSig,
+ },
+ ]);
+
+ let recipesFromRS = (
+ await RecipeRunner._remoteSettingsClientForTesting.get()
+ ).map(({ recipe, signature }) => recipe);
+ // Sort the records by id so that they match the order in the assertion
+ recipesFromRS.sort((a, b) => a.id - b.id);
+ Assert.deepEqual(
+ recipesFromRS,
+ [matchRecipe, noMatchRecipe, missingRecipe],
+ "The recipes should be accesible from Remote Settings"
+ );
+
+ await RecipeRunner.run();
+
+ Assert.deepEqual(
+ verifyObjectSignatureStub.args,
+ [
+ [matchRecipe, fakeSig, "recipe"],
+ [missingRecipe, fakeSig, "recipe"],
+ [noMatchRecipe, fakeSig, "recipe"],
+ ],
+ "all recipes should have their signature verified"
+ );
+ Assert.deepEqual(
+ processRecipeStub.args,
+ [
+ [matchRecipe, BaseAction.suitability.FILTER_MATCH],
+ [missingRecipe, BaseAction.suitability.FILTER_MATCH],
+ [noMatchRecipe, BaseAction.suitability.FILTER_MISMATCH],
+ ],
+ "Recipes should be reported with the correct suitabilities"
+ );
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[noMatchRecipe, Uptake.RECIPE_DIDNT_MATCH_FILTER]],
+ "Filtered-out recipes should be reported"
+ );
+ }
+);
+
+decorate_task(
+ withStub(NormandyApi, "verifyObjectSignature"),
+ withStub(ActionsManager.prototype, "processRecipe"),
+ withStub(RecipeRunner, "getCapabilities"),
+  async function testCapabilitiesFromRemoteSettings({
+ processRecipeStub,
+ getCapabilitiesStub,
+ }) {
+ getCapabilitiesStub.returns(new Set(["compatible"]));
+ const compatibleRecipe = {
+ name: "match",
+ filter_expression: "true",
+ capabilities: ["compatible"],
+ };
+ const incompatibleRecipe = {
+ name: "noMatch",
+ filter_expression: "true",
+ capabilities: ["incompatible"],
+ };
+
+ const db = await RecipeRunner._remoteSettingsClientForTesting.db;
+ const fakeSig = { signature: "abc" };
+ await db.importChanges(
+ {},
+ Date.now(),
+ [
+ {
+ id: "match",
+ recipe: compatibleRecipe,
+ signature: fakeSig,
+ },
+ {
+ id: "noMatch",
+ recipe: incompatibleRecipe,
+ signature: fakeSig,
+ },
+ ],
+ {
+ clear: true,
+ }
+ );
+
+ await RecipeRunner.run();
+
+ Assert.deepEqual(
+ processRecipeStub.args,
+ [
+ [compatibleRecipe, BaseAction.suitability.FILTER_MATCH],
+ [incompatibleRecipe, BaseAction.suitability.CAPABILITIES_MISMATCH],
+ ],
+ "recipes should be marked if their capabilities aren't compatible"
+ );
+ }
+);
+
+decorate_task(
+ withStub(ActionsManager.prototype, "processRecipe"),
+ withStub(NormandyApi, "verifyObjectSignature"),
+ withStub(Uptake, "reportRecipe"),
+ NormandyTestUtils.withMockRecipeCollection(),
+ async function testBadSignatureFromRemoteSettings({
+ processRecipeStub,
+ verifyObjectSignatureStub,
+ reportRecipeStub,
+ mockRecipeCollection,
+ }) {
+ verifyObjectSignatureStub.throws(new Error("fake signature error"));
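+    // Every signature check now throws, so each recipe should be treated as
+    // having a bad signature.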
+ const badSigRecipe = {
+ id: 1,
+ name: "badSig",
+ action: "matchAction",
+ filter_expression: "true",
+ };
+ await mockRecipeCollection.addRecipes([badSigRecipe]);
+
+ await RecipeRunner.run();
+
+ Assert.deepEqual(processRecipeStub.args, [
+ [badSigRecipe, BaseAction.suitability.SIGNATURE_ERROR],
+ ]);
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[badSigRecipe, Uptake.RECIPE_INVALID_SIGNATURE]],
+ "The recipe should have its uptake status recorded"
+ );
+ }
+);
+
+// Test init() during normal operation
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["datareporting.healthreport.uploadEnabled", true], // telemetry enabled
+ ["app.normandy.dev_mode", false],
+ ["app.normandy.first_run", false],
+ ],
+ }),
+ withStub(RecipeRunner, "run"),
+ withStub(RecipeRunner, "registerTimer"),
+ async function testInit({ runStub, registerTimerStub }) {
+ await RecipeRunner.init();
+ ok(
+ !runStub.called,
+ "RecipeRunner.run should not be called immediately when not in dev mode or first run"
+ );
+ ok(registerTimerStub.called, "RecipeRunner.init registers a timer");
+ }
+);
+
+// test init() in dev mode
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["datareporting.healthreport.uploadEnabled", true], // telemetry enabled
+ ["app.normandy.dev_mode", true],
+ ],
+ }),
+ withStub(RecipeRunner, "run"),
+ withStub(RecipeRunner, "registerTimer"),
+ withStub(RecipeRunner._remoteSettingsClientForTesting, "sync"),
+ async function testInitDevMode({ runStub, registerTimerStub, syncStub }) {
+ await RecipeRunner.init();
+ Assert.deepEqual(
+ runStub.args,
+ [[{ trigger: "devMode" }]],
+ "RecipeRunner.run should be called immediately when in dev mode"
+ );
+ ok(registerTimerStub.called, "RecipeRunner.init should register a timer");
+ ok(
+ syncStub.called,
+ "RecipeRunner.init should sync remote settings in dev mode"
+ );
+ }
+);
+
+// Test init() first run
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["datareporting.healthreport.uploadEnabled", true], // telemetry enabled
+ ["app.normandy.dev_mode", false],
+ ["app.normandy.first_run", true],
+ ],
+ }),
+ withStub(RecipeRunner, "run"),
+ withStub(RecipeRunner, "registerTimer"),
+ withStub(RecipeRunner, "watchPrefs"),
+ async function testInitFirstRun({
+ runStub,
+ registerTimerStub,
+ watchPrefsStub,
+ }) {
+ await RecipeRunner.init();
+ Assert.deepEqual(
+ runStub.args,
+ [[{ trigger: "firstRun" }]],
+ "RecipeRunner.run is called immediately on first run"
+ );
+ ok(
+ !Services.prefs.getBoolPref("app.normandy.first_run"),
+ "On first run, the first run pref is set to false"
+ );
+ ok(
+ registerTimerStub.called,
+ "RecipeRunner.registerTimer registers a timer"
+ );
+
+ // RecipeRunner.init() sets this pref to false, but SpecialPowers
+ // relies on the preferences it manages to actually change when it
+    // tries to change them. Setting this back to true here allows
+ // that to happen. Not doing this causes popPrefEnv to hang forever.
+ Services.prefs.setBoolPref("app.normandy.first_run", true);
+ }
+);
+
+// Test that prefs are watched correctly
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["app.normandy.dev_mode", false],
+ ["app.normandy.first_run", false],
+ ["app.normandy.enabled", true],
+ ],
+ }),
+ withStub(RecipeRunner, "run"),
+ withStub(RecipeRunner, "enable"),
+ withStub(RecipeRunner, "disable"),
+ withStub(CleanupManager, "addCleanupHandler"),
+
+ async function testPrefWatching({ runStub, enableStub, disableStub }) {
+ await RecipeRunner.init();
+ is(enableStub.callCount, 1, "Enable should be called initially");
+ is(disableStub.callCount, 0, "Disable should not be called initially");
+
+ await SpecialPowers.pushPrefEnv({ set: [["app.normandy.enabled", false]] });
+ is(enableStub.callCount, 1, "Enable should not be called again");
+ is(
+ disableStub.callCount,
+ 1,
+ "RecipeRunner should disable when Shield is disabled"
+ );
+
+ await SpecialPowers.pushPrefEnv({ set: [["app.normandy.enabled", true]] });
+ is(
+ enableStub.callCount,
+ 2,
+ "RecipeRunner should re-enable when Shield is enabled"
+ );
+ is(disableStub.callCount, 1, "Disable should not be called again");
+
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.api_url", "http://example.com"]],
+ }); // does not start with https://
+ is(enableStub.callCount, 2, "Enable should not be called again");
+ is(
+ disableStub.callCount,
+ 2,
+ "RecipeRunner should disable when an invalid api url is given"
+ );
+
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.normandy.api_url", "https://example.com"]],
+    }); // starts with https://
+ is(
+ enableStub.callCount,
+ 3,
+ "RecipeRunner should re-enable when a valid api url is given"
+ );
+ is(disableStub.callCount, 2, "Disable should not be called again");
+
+ is(
+ runStub.callCount,
+ 0,
+ "RecipeRunner.run should not be called during this test"
+ );
+ }
+);
+
+// Test that enable and disable are idempotent
+decorate_task(
+ withStub(RecipeRunner, "registerTimer"),
+ withStub(RecipeRunner, "unregisterTimer"),
+  async function testEnableDisableIdempotent({
+    registerTimerStub,
+    unregisterTimerStub,
+  }) {
+ const originalEnabled = RecipeRunner.enabled;
+
+ try {
+ RecipeRunner.enabled = false;
+ RecipeRunner.enable();
+ RecipeRunner.enable();
+ RecipeRunner.enable();
+ is(registerTimerStub.callCount, 1, "Enable should be idempotent");
+
+ RecipeRunner.enabled = true;
+ RecipeRunner.disable();
+ RecipeRunner.disable();
+ RecipeRunner.disable();
+ is(registerTimerStub.callCount, 1, "Disable should be idempotent");
+ } finally {
+ RecipeRunner.enabled = originalEnabled;
+ }
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [["app.normandy.onsync_skew_sec", 0]],
+ }),
+ withStub(RecipeRunner, "run"),
+ async function testRunOnSyncRemoteSettings({ runStub }) {
+ const rsClient = RecipeRunner._remoteSettingsClientForTesting;
+ await RecipeRunner.init();
+ ok(
+ RecipeRunner._alreadySetUpRemoteSettings,
+ "remote settings should be set up in the runner"
+ );
+
+ // Runner disabled
+ RecipeRunner.disable();
+ await rsClient.emit("sync", {});
+ ok(!runStub.called, "run() should not be called if disabled");
+ runStub.reset();
+
+ // Runner enabled
+ RecipeRunner.enable();
+ await rsClient.emit("sync", {});
+ ok(runStub.called, "run() should be called if enabled");
+ runStub.reset();
+
+ // Runner disabled
+ RecipeRunner.disable();
+ await rsClient.emit("sync", {});
+ ok(!runStub.called, "run() should not be called if disabled");
+ runStub.reset();
+
+ // Runner re-enabled
+ RecipeRunner.enable();
+ await rsClient.emit("sync", {});
+ ok(runStub.called, "run() should be called if runner is re-enabled");
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["app.normandy.onsync_skew_sec", 600], // 10 minutes, much longer than the test will take to run
+ ],
+ }),
+ withStub(RecipeRunner, "run"),
+ async function testOnSyncRunDelayed({ runStub }) {
+ ok(
+ !RecipeRunner._syncSkewTimeout,
+ "precondition: No timer should be active"
+ );
+ const rsClient = RecipeRunner._remoteSettingsClientForTesting;
+ await rsClient.emit("sync", {});
+    ok(runStub.notCalled, "run() should not be called yet");
+ ok(RecipeRunner._syncSkewTimeout, "A timer should be set");
+ clearInterval(RecipeRunner._syncSkewTimeout); // cleanup
+ }
+);
+
+decorate_task(
+ withStub(RecipeRunner._remoteSettingsClientForTesting, "get"),
+ async function testRunCanRunOnlyOnce({ getStub }) {
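+    // Make the stubbed Remote Settings get() resolve slowly (10 ms), so that
+    // the two run() calls below overlap in time.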
+ getStub.returns(
+ // eslint-disable-next-line mozilla/no-arbitrary-setTimeout
+ new Promise(resolve => setTimeout(() => resolve([]), 10))
+ );
+
+ // Run 2 in parallel.
+ await Promise.all([RecipeRunner.run(), RecipeRunner.run()]);
+
+    is(getStub.callCount, 1, "run() is a no-op if already running");
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [
+ // Enable update timer logs.
+ ["app.update.log", true],
+ ["app.normandy.api_url", "https://example.com"],
+ ["app.normandy.first_run", false],
+ ["app.normandy.onsync_skew_sec", 0],
+ ],
+ }),
+ withSpy(RecipeRunner, "run"),
+ withStub(ActionsManager.prototype, "finalize"),
+ withStub(Uptake, "reportRunner"),
+ async function testSyncDelaysTimer({ runSpy }) {
+ // Mark any existing timer as having run just now.
+ for (const { value } of Services.catMan.enumerateCategory("update-timer")) {
+ const timerID = value.split(",")[2];
+ console.log(`Mark timer ${timerID} as ran recently`);
+ // See https://searchfox.org/mozilla-central/rev/11cfa0462/toolkit/components/timermanager/UpdateTimerManager.jsm#8
+ const timerLastUpdatePref = `app.update.lastUpdateTime.${timerID}`;
+ const lastUpdateTime = Math.round(Date.now() / 1000);
+ Services.prefs.setIntPref(timerLastUpdatePref, lastUpdateTime);
+ }
+
+ // Give our timer a short duration so that it executes quickly.
+ // This needs to be more than 1 second as we will call UpdateTimerManager's
+ // notify method twice in a row and verify that our timer is only called
+ // once, but because the timestamps are rounded to seconds, just a few
+ // additional ms could result in a higher value that would cause the timer
+ // to be called again almost immediately if our timer duration was only 1s.
+ const kTimerDuration = 2;
+ Services.prefs.setIntPref(
+ "app.normandy.run_interval_seconds",
+ kTimerDuration
+ );
+ // This will refresh the timer interval.
+ RecipeRunner.unregisterTimer();
+ // Ensure our timer is ready to run now.
+ Services.prefs.setIntPref(
+ "app.update.lastUpdateTime.recipe-client-addon-run",
+ Math.round(Date.now() / 1000) - kTimerDuration
+ );
+ RecipeRunner.registerTimer();
+
+ is(runSpy.callCount, 0, "run() shouldn't have run yet");
+
+ // Simulate timer notification.
+ runSpy.resetHistory();
+ const service = Cc["@mozilla.org/updates/timer-manager;1"].getService(
+ Ci.nsITimerCallback
+ );
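+    // Helper that creates a throwaway one-shot nsITimer to hand to the
+    // update timer manager's notify() calls below.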
+ const newTimer = () => {
+ const t = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ t.initWithCallback(() => {}, 10, Ci.nsITimer.TYPE_ONE_SHOT);
+ return t;
+ };
+
+ // Run timer once, to make sure this test works as expected.
+ const startTime = Date.now();
+ const endPromise = TestUtils.topicObserved("recipe-runner:end");
+ service.notify(newTimer());
+ await endPromise; // will timeout if run() not called.
+ const timerLatency = Math.max(Date.now() - startTime, 1);
+ is(runSpy.callCount, 1, "run() should be called from timer");
+
+ // Run once from sync event.
+ runSpy.resetHistory();
+ const rsClient = RecipeRunner._remoteSettingsClientForTesting;
+ await rsClient.emit("sync", {}); // waits for listeners to run.
+ is(runSpy.callCount, 1, "run() should be called from sync");
+
+    // Trigger the timer again. This should not run recipes, since a sync just happened.
+    runSpy.resetHistory();
+    is(runSpy.callCount, 0, "run() has not been called since the spy was reset");
+    service.notify(newTimer());
+    is(runSpy.callCount, 0, "run() is not called synchronously from the timer");
+    // Wait at least as long as the latency we had above. Ten times as a margin.
+    // eslint-disable-next-line mozilla/no-arbitrary-setTimeout
+    await new Promise(resolve => setTimeout(resolve, timerLatency * 10));
+    is(runSpy.callCount, 0, "run() does not run again from the timer after a recent sync");
+ RecipeRunner.disable();
+ }
+);
+
+// Test that the capabilities for context variables are generated correctly.
+decorate_task(async function testAutomaticCapabilities() {
+ const capabilities = await RecipeRunner.getCapabilities();
+
+ ok(
+ capabilities.has("jexl.context.env.country"),
+ "context variables from Normandy's client context should be included"
+ );
+ ok(
+ capabilities.has("jexl.context.env.version"),
+ "context variables from the superclass context should be included"
+ );
+ ok(
+ !capabilities.has("jexl.context.env.getClientClassification"),
+ "non-getter functions should not be included"
+ );
+ ok(
+ !capabilities.has("jexl.context.env.prototype"),
+ "built-in, non-enumerable properties should not be included"
+ );
+});
+
+// Test that recipe runner won't run if Normandy hasn't been initialized.
+decorate_task(
+ withStub(Uptake, "reportRunner"),
+ withStub(ActionsManager.prototype, "finalize"),
+ NormandyTestUtils.withMockRecipeCollection([]),
+ async function testRunEvents({ reportRunnerStub, finalizeStub }) {
+ const observer = sinon.spy();
+ Services.obs.addObserver(observer, "recipe-runner:start");
+
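+    // Swap in an unresolved deferred for the "default prefs applied" promise,
+    // so that RecipeRunner.run() has to wait until we resolve it below.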
+ const originalPrefsApplied = Normandy.defaultPrefsHaveBeenApplied;
+ Normandy.defaultPrefsHaveBeenApplied = PromiseUtils.defer();
+
+ const recipeRunnerPromise = RecipeRunner.run();
+ await Promise.resolve();
+ ok(
+ !observer.called,
+ "RecipeRunner.run shouldn't run if Normandy isn't initialized"
+ );
+
+ Normandy.defaultPrefsHaveBeenApplied.resolve();
+ await recipeRunnerPromise;
+ ok(
+ observer.called,
+ "RecipeRunner.run should run after Normandy has initialized"
+ );
+
+ // cleanup
+ Services.obs.removeObserver(observer, "recipe-runner:start");
+ Normandy.defaultPrefsHaveBeenApplied = originalPrefsApplied;
+ }
+);
+
+// If no recipes are found on the server, the action manager should be informed of that
+decorate_task(
+ withSpy(ActionsManager.prototype, "finalize"),
+ NormandyTestUtils.withMockRecipeCollection([]),
+ async function testNoRecipes({ finalizeSpy }) {
+ await RecipeRunner.run();
+ Assert.deepEqual(
+ finalizeSpy.args,
+ [[{ noRecipes: true }]],
+ "Action manager should know there were no recipes received"
+ );
+ }
+);
+
+// If some recipes are found on the server, the action manager should be informed of that
+decorate_task(
+ withSpy(ActionsManager.prototype, "finalize"),
+ NormandyTestUtils.withMockRecipeCollection([{ id: 1 }]),
+ async function testSomeRecipes({ finalizeSpy }) {
+ await RecipeRunner.run();
+ Assert.deepEqual(
+ finalizeSpy.args,
+ [[{ noRecipes: false }]],
+ "Action manager should know there were recipes received"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_ShieldPreferences.js b/toolkit/components/normandy/test/browser/browser_ShieldPreferences.js
new file mode 100644
index 0000000000..e455a5f25b
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_ShieldPreferences.js
@@ -0,0 +1,91 @@
+"use strict";
+
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { ShieldPreferences } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ShieldPreferences.sys.mjs"
+);
+
+const OPT_OUT_STUDIES_ENABLED_PREF = "app.shield.optoutstudies.enabled";
+
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+const { addonStudyFactory, preferenceStudyFactory } =
+ NormandyTestUtils.factories;
+
+ShieldPreferences.init();
+
+decorate_task(
+ withMockPreferences(),
+ AddonStudies.withStudies([
+ addonStudyFactory({ active: true }),
+ addonStudyFactory({ active: true }),
+ ]),
+ async function testDisableStudiesWhenOptOutDisabled({
+ mockPreferences,
+ addonStudies: [study1, study2],
+ }) {
+ mockPreferences.set(OPT_OUT_STUDIES_ENABLED_PREF, true);
+ const observers = [
+ studyEndObserved(study1.recipeId),
+ studyEndObserved(study2.recipeId),
+ ];
+ Services.prefs.setBoolPref(OPT_OUT_STUDIES_ENABLED_PREF, false);
+ await Promise.all(observers);
+
+ const newStudy1 = await AddonStudies.get(study1.recipeId);
+ const newStudy2 = await AddonStudies.get(study2.recipeId);
+ ok(
+ !newStudy1.active && !newStudy2.active,
+ "Setting the opt-out pref to false stops all active opt-out studies."
+ );
+ }
+);
+
+decorate_task(
+ withMockPreferences(),
+ PreferenceExperiments.withMockExperiments([
+ preferenceStudyFactory({ active: true }),
+ preferenceStudyFactory({ active: true }),
+ ]),
+ withStub(PreferenceExperiments, "stop"),
+ async function testDisableExperimentsWhenOptOutDisabled({
+ mockPreferences,
+ prefExperiments: [study1, study2],
+ stopStub,
+ }) {
+ mockPreferences.set(OPT_OUT_STUDIES_ENABLED_PREF, true);
+ let stopArgs = [];
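+    // Replace the stubbed stop() with a fake that records its arguments and
+    // resolves stoppedBoth once both experiments have been stopped.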
+ let stoppedBoth = new Promise(resolve => {
+ let calls = 0;
+ stopStub.callsFake(function () {
+ stopArgs.push(Array.from(arguments));
+ calls++;
+ if (calls == 2) {
+ resolve();
+ }
+ });
+ });
+ Services.prefs.setBoolPref(OPT_OUT_STUDIES_ENABLED_PREF, false);
+ await stoppedBoth;
+
+ Assert.deepEqual(stopArgs, [
+ [
+ study1.slug,
+ {
+ reason: "general-opt-out",
+ caller: "observePrefChange::general-opt-out",
+ },
+ ],
+ [
+ study2.slug,
+ {
+ reason: "general-opt-out",
+ caller: "observePrefChange::general-opt-out",
+ },
+ ],
+ ]);
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_Storage.js b/toolkit/components/normandy/test/browser/browser_Storage.js
new file mode 100644
index 0000000000..74272c52d9
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_Storage.js
@@ -0,0 +1,43 @@
+"use strict";
+
+add_task(async function () {
+ const store1 = new Storage("prefix1");
+ const store2 = new Storage("prefix2");
+
+ // Make sure values return null before being set
+ Assert.equal(await store1.getItem("key"), null);
+ Assert.equal(await store2.getItem("key"), null);
+
+ // Set values to check
+ await store1.setItem("key", "value1");
+ await store2.setItem("key", "value2");
+
+ // Check that they are available
+ Assert.equal(await store1.getItem("key"), "value1");
+ Assert.equal(await store2.getItem("key"), "value2");
+
+ // Remove them, and check they are gone
+ await store1.removeItem("key");
+ await store2.removeItem("key");
+ Assert.equal(await store1.getItem("key"), null);
+ Assert.equal(await store2.getItem("key"), null);
+
+ // Check that numbers are stored as numbers (not strings)
+ await store1.setItem("number", 42);
+ Assert.equal(await store1.getItem("number"), 42);
+
+ // Check complex types work
+ const complex = { a: 1, b: [2, 3], c: { d: 4 } };
+ await store1.setItem("complex", complex);
+ Assert.deepEqual(await store1.getItem("complex"), complex);
+
+ // Check that clearing the storage removes data from multiple
+ // prefixes.
+ await store1.setItem("removeTest", 1);
+ await store2.setItem("removeTest", 2);
+ Assert.equal(await store1.getItem("removeTest"), 1);
+ Assert.equal(await store2.getItem("removeTest"), 2);
+ await Storage.clearAllStorage();
+ Assert.equal(await store1.getItem("removeTest"), null);
+ Assert.equal(await store2.getItem("removeTest"), null);
+});
diff --git a/toolkit/components/normandy/test/browser/browser_Uptake.js b/toolkit/components/normandy/test/browser/browser_Uptake.js
new file mode 100644
index 0000000000..1fa3db3da1
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_Uptake.js
@@ -0,0 +1,15 @@
+"use strict";
+
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+
+const Telemetry = Services.telemetry;
+
+add_task(async function reportRecipeSubmitsFreshness() {
+ Telemetry.clearScalars();
+ const recipe = { id: 17, revision_id: "12" };
+ await Uptake.reportRecipe(recipe, Uptake.RECIPE_SUCCESS);
+ const scalars = Telemetry.getSnapshotForKeyedScalars("main", true);
+ Assert.deepEqual(scalars.parent["normandy.recipe_freshness"], { 17: 12 });
+});
diff --git a/toolkit/components/normandy/test/browser/browser_about_preferences.js b/toolkit/components/normandy/test/browser/browser_about_preferences.js
new file mode 100644
index 0000000000..7b0c706d13
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_about_preferences.js
@@ -0,0 +1,106 @@
+"use strict";
+
+const OPT_OUT_PREF = "app.shield.optoutstudies.enabled";
+
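+// Test decorator that opens about:preferences#privacy in a new tab and passes
+// that tab's browser to the wrapped test function.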
+function withPrivacyPrefs() {
+ return function (testFunc) {
+ return async args =>
+ BrowserTestUtils.withNewTab("about:preferences#privacy", async browser =>
+ testFunc({ ...args, browser })
+ );
+ };
+}
+
+decorate_task(
+ withPrefEnv({
+ set: [[OPT_OUT_PREF, true]],
+ }),
+ withPrivacyPrefs(),
+ async function testCheckedOnLoad({ browser }) {
+ const checkbox = browser.contentDocument.getElementById(
+ "optOutStudiesEnabled"
+ );
+ ok(
+ checkbox.checked,
+ "Opt-out checkbox is checked on load when the pref is true"
+ );
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [[OPT_OUT_PREF, false]],
+ }),
+ withPrivacyPrefs(),
+ async function testUncheckedOnLoad({ browser }) {
+ const checkbox = browser.contentDocument.getElementById(
+ "optOutStudiesEnabled"
+ );
+ ok(
+ !checkbox.checked,
+ "Opt-out checkbox is unchecked on load when the pref is false"
+ );
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [[OPT_OUT_PREF, true]],
+ }),
+ withPrivacyPrefs(),
+ async function testCheckboxes({ browser }) {
+ const optOutCheckbox = browser.contentDocument.getElementById(
+ "optOutStudiesEnabled"
+ );
+
+ optOutCheckbox.click();
+ ok(
+ !Services.prefs.getBoolPref(OPT_OUT_PREF),
+ "Unchecking the opt-out checkbox sets the pref to false."
+ );
+ optOutCheckbox.click();
+ ok(
+ Services.prefs.getBoolPref(OPT_OUT_PREF),
+ "Checking the opt-out checkbox sets the pref to true."
+ );
+ }
+);
+
+decorate_task(
+ withPrefEnv({
+ set: [[OPT_OUT_PREF, true]],
+ }),
+ withPrivacyPrefs(),
+ async function testPrefWatchers({ browser }) {
+ const optOutCheckbox = browser.contentDocument.getElementById(
+ "optOutStudiesEnabled"
+ );
+
+ Services.prefs.setBoolPref(OPT_OUT_PREF, false);
+ ok(
+ !optOutCheckbox.checked,
+ "Disabling the opt-out pref unchecks the opt-out checkbox."
+ );
+ Services.prefs.setBoolPref(OPT_OUT_PREF, true);
+ ok(
+ optOutCheckbox.checked,
+ "Enabling the opt-out pref checks the opt-out checkbox."
+ );
+ }
+);
+
+decorate_task(
+ withPrivacyPrefs(),
+ async function testViewStudiesLink({ browser }) {
+ browser.contentDocument.getElementById("viewShieldStudies").click();
+ await BrowserTestUtils.waitForLocationChange(gBrowser);
+
+ is(
+ gBrowser.currentURI.spec,
+ "about:studies",
+ "Clicking the view studies link opens about:studies in a new tab."
+ );
+
+ gBrowser.removeCurrentTab();
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_about_studies.js b/toolkit/components/normandy/test/browser/browser_about_studies.js
new file mode 100644
index 0000000000..745e961b9a
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_about_studies.js
@@ -0,0 +1,825 @@
+"use strict";
+
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+const { ExperimentFakes } = ChromeUtils.importESModule(
+ "resource://testing-common/NimbusTestUtils.sys.mjs"
+);
+const { ExperimentManager } = ChromeUtils.importESModule(
+ "resource://nimbus/lib/ExperimentManager.sys.mjs"
+);
+const { RemoteSettingsExperimentLoader } = ChromeUtils.importESModule(
+ "resource://nimbus/lib/RemoteSettingsExperimentLoader.sys.mjs"
+);
+
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+const { addonStudyFactory, preferenceStudyFactory } =
+ NormandyTestUtils.factories;
+
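+// Test decorator that opens about:studies in a new tab and passes that tab's
+// browser to the wrapped test function.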
+function withAboutStudies() {
+ return function (testFunc) {
+ return async args =>
+ BrowserTestUtils.withNewTab("about:studies", async browser =>
+ testFunc({ ...args, browser })
+ );
+ };
+}
+
+// Test that the code renders at all
+decorate_task(
+ withAboutStudies(),
+ async function testAboutStudiesWorks({ browser }) {
+ const appFound = await SpecialPowers.spawn(
+ browser,
+ [],
+ () => !!content.document.getElementById("app")
+ );
+ ok(appFound, "App element was found");
+ }
+);
+
+// Test that the learn more element is displayed correctly
+decorate_task(
+ withPrefEnv({
+ set: [["app.normandy.shieldLearnMoreUrl", "http://test/%OS%/"]],
+ }),
+ withAboutStudies(),
+ async function testLearnMore({ browser }) {
+ SpecialPowers.spawn(browser, [], async () => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(() =>
+ doc.getElementById("shield-studies-learn-more")
+ );
+ doc.getElementById("shield-studies-learn-more").click();
+ });
+ await BrowserTestUtils.waitForLocationChange(gBrowser);
+
+ const location = browser.currentURI.spec;
+ is(
+ location,
+ AboutPages.aboutStudies.getShieldLearnMoreHref(),
+ "Clicking Learn More opens the correct page on SUMO."
+ );
+ ok(!location.includes("%OS%"), "The Learn More URL is formatted.");
+ }
+);
+
+// Test that jumping to preferences works as expected
+decorate_task(
+ withAboutStudies(),
+ async function testUpdatePreferences({ browser }) {
+ let loadPromise = BrowserTestUtils.firstBrowserLoaded(window);
+
+ // We have to use gBrowser instead of browser in most spots since we're
+ // dealing with a new tab outside of the about:studies tab.
+ const tab = await BrowserTestUtils.switchTab(gBrowser, () => {
+ SpecialPowers.spawn(browser, [], async () => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(() =>
+ doc.getElementById("shield-studies-update-preferences")
+ );
+ content.document
+ .getElementById("shield-studies-update-preferences")
+ .click();
+ });
+ });
+
+ await loadPromise;
+
+ const location = gBrowser.currentURI.spec;
+ is(
+ location,
+ "about:preferences#privacy",
+ "Clicking Update Preferences opens the privacy section of the new about:preferences."
+ );
+
+ BrowserTestUtils.removeTab(tab);
+ }
+);
+
+// Test that the study listing shows studies in the proper order and grouping
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ slug: "fake-study-a",
+ userFacingName: "A Fake Add-on Study",
+ active: true,
+ userFacingDescription: "A fake description",
+ studyStartDate: new Date(2018, 0, 4),
+ }),
+ addonStudyFactory({
+ slug: "fake-study-b",
+ userFacingName: "B Fake Add-on Study",
+ active: false,
+ userFacingDescription: "B fake description",
+ studyStartDate: new Date(2018, 0, 2),
+ }),
+ addonStudyFactory({
+ slug: "fake-study-c",
+ userFacingName: "C Fake Add-on Study",
+ active: true,
+ userFacingDescription: "C fake description",
+ studyStartDate: new Date(2018, 0, 1),
+ }),
+ ]),
+ PreferenceExperiments.withMockExperiments([
+ preferenceStudyFactory({
+ slug: "fake-study-d",
+ userFacingName: null,
+ userFacingDescription: null,
+ lastSeen: new Date(2018, 0, 3),
+ expired: false,
+ }),
+ preferenceStudyFactory({
+ slug: "fake-study-e",
+ userFacingName: "E Fake Preference Study",
+ lastSeen: new Date(2018, 0, 5),
+ expired: true,
+ }),
+ preferenceStudyFactory({
+ slug: "fake-study-f",
+ userFacingName: "F Fake Preference Study",
+ lastSeen: new Date(2018, 0, 6),
+ expired: false,
+ }),
+ ]),
+ withAboutStudies(),
+ async function testStudyListing({ addonStudies, prefExperiments, browser }) {
+ await SpecialPowers.spawn(
+ browser,
+ [{ addonStudies, prefExperiments }],
+ async ({ addonStudies, prefExperiments }) => {
+ const doc = content.document;
+
+ function getStudyRow(docElem, slug) {
+ return docElem.querySelector(`.study[data-study-slug="${slug}"]`);
+ }
+
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".active-study-list .study").length
+ );
+ const activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ const inactiveNames = Array.from(
+ doc.querySelectorAll(".inactive-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [
+ prefExperiments[2].slug,
+ addonStudies[0].slug,
+ prefExperiments[0].slug,
+ addonStudies[2].slug,
+ ],
+ "Active studies are grouped by enabled status, and sorted by date"
+ );
+ Assert.deepEqual(
+ inactiveNames,
+ [prefExperiments[1].slug, addonStudies[1].slug],
+ "Inactive studies are grouped by enabled status, and sorted by date"
+ );
+
+ const activeAddonStudy = getStudyRow(doc, addonStudies[0].slug);
+ ok(
+ activeAddonStudy
+ .querySelector(".study-description")
+ .textContent.includes(addonStudies[0].userFacingDescription),
+ "Study descriptions are shown in about:studies."
+ );
+ is(
+ activeAddonStudy.querySelector(".study-status").textContent,
+ "Active",
+ "Active studies show an 'Active' indicator."
+ );
+ ok(
+ activeAddonStudy.querySelector(".remove-button"),
+ "Active studies show a remove button"
+ );
+ is(
+ activeAddonStudy
+ .querySelector(".study-icon")
+ .textContent.toLowerCase(),
+ "a",
+ "Study icons use the first letter of the study name."
+ );
+
+ const inactiveAddonStudy = getStudyRow(doc, addonStudies[1].slug);
+ is(
+ inactiveAddonStudy.querySelector(".study-status").textContent,
+ "Complete",
+ "Inactive studies are marked as complete."
+ );
+ ok(
+ !inactiveAddonStudy.querySelector(".remove-button"),
+ "Inactive studies do not show a remove button"
+ );
+
+ const activePrefStudy = getStudyRow(doc, prefExperiments[0].slug);
+ const preferenceName = Object.keys(prefExperiments[0].preferences)[0];
+ ok(
+ activePrefStudy
+ .querySelector(".study-description")
+ .textContent.includes(preferenceName),
+ "Preference studies show the preference they are changing"
+ );
+ is(
+ activePrefStudy.querySelector(".study-status").textContent,
+ "Active",
+ "Active studies show an 'Active' indicator."
+ );
+ ok(
+ activePrefStudy.querySelector(".remove-button"),
+ "Active studies show a remove button"
+ );
+
+ const inactivePrefStudy = getStudyRow(doc, prefExperiments[1].slug);
+ is(
+ inactivePrefStudy.querySelector(".study-status").textContent,
+ "Complete",
+ "Inactive studies are marked as complete."
+ );
+ ok(
+ !inactivePrefStudy.querySelector(".remove-button"),
+ "Inactive studies do not show a remove button"
+ );
+
+ activeAddonStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, addonStudies[0].slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, addonStudies[0].slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI to show that the study has been disabled."
+ );
+
+ activePrefStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, prefExperiments[0].slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, prefExperiments[0].slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI to show that the study has been disabled."
+ );
+ }
+ );
+
+ const updatedAddonStudy = await AddonStudies.get(addonStudies[0].recipeId);
+ ok(
+ !updatedAddonStudy.active,
+ "Clicking the remove button marks addon studies as inactive in storage."
+ );
+
+ const updatedPrefStudy = await PreferenceExperiments.get(
+ prefExperiments[0].slug
+ );
+ ok(
+ updatedPrefStudy.expired,
+ "Clicking the remove button marks preference studies as expired in storage."
+ );
+ }
+);
+
+// Test that a message is shown when no studies have been run
+decorate_task(
+ AddonStudies.withStudies([]),
+ withAboutStudies(),
+ async function testStudyListingNoStudies({ browser }) {
+ await SpecialPowers.spawn(browser, [], async () => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".study-list-info").length
+ );
+ const studyRows = doc.querySelectorAll(".study-list .study");
+ is(studyRows.length, 0, "There should be no studies");
+ is(
+ doc.querySelector(".study-list-info").textContent,
+ "You have not participated in any studies.",
+ "A message is shown when no studies exist"
+ );
+ });
+ }
+);
+
+// Test the message shown when studies are disabled but studies exist
+decorate_task(
+ withAboutStudies(),
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ userFacingName: "A Fake Add-on Study",
+ slug: "fake-addon-study",
+ active: false,
+ userFacingDescription: "A fake description",
+ studyStartDate: new Date(2018, 0, 4),
+ }),
+ ]),
+ PreferenceExperiments.withMockExperiments([
+ preferenceStudyFactory({
+ slug: "fake-pref-study",
+ userFacingName: "B Fake Preference Study",
+ lastSeen: new Date(2018, 0, 5),
+ expired: true,
+ }),
+ ]),
+ async function testStudyListingDisabled({ browser }) {
+ try {
+ RecipeRunner.disable();
+
+ await SpecialPowers.spawn(browser, [], async () => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(() =>
+ doc.querySelector(".info-box-content > span")
+ );
+
+ is(
+ doc.querySelector(".info-box-content > span").textContent,
+ "This is a list of studies that you have participated in. No new studies will run.",
+ "A message is shown when studies are disabled"
+ );
+ });
+ } finally {
+ // reset RecipeRunner.enabled
+ RecipeRunner.checkPrefs();
+ }
+ }
+);
+
+// Test for bug 1498940 - studies should be detected as disabled when only the study opt-out pref is turned off
+decorate_task(
+ withPrefEnv({
+ set: [
+ ["datareporting.healthreport.uploadEnabled", true],
+ ["app.normandy.api_url", "https://example.com"],
+ ["app.shield.optoutstudies.enabled", false],
+ ],
+ }),
+ withAboutStudies(),
+ AddonStudies.withStudies([]),
+ PreferenceExperiments.withMockExperiments([]),
+ async function testStudyListingStudiesOptOut({ browser }) {
+ RecipeRunner.checkPrefs();
+ ok(
+ RecipeRunner.enabled,
+ "RecipeRunner should be enabled as a Precondition"
+ );
+
+ await SpecialPowers.spawn(browser, [], async () => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(() => {
+ const span = doc.querySelector(".info-box-content > span");
+ return span && span.textContent;
+ });
+
+ is(
+ doc.querySelector(".info-box-content > span").textContent,
+ "This is a list of studies that you have participated in. No new studies will run.",
+ "A message is shown when studies are disabled"
+ );
+ });
+ }
+);
+
+// Test that clicking remove on a study that was disabled by an outside source
+// after the page loaded still updates the UI correctly.
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ slug: "fake-addon-study",
+ userFacingName: "Fake Add-on Study",
+ active: true,
+ userFacingDescription: "A fake description",
+ studyStartDate: new Date(2018, 0, 4),
+ }),
+ ]),
+ PreferenceExperiments.withMockExperiments([
+ preferenceStudyFactory({
+ slug: "fake-pref-study",
+ userFacingName: "Fake Preference Study",
+ lastSeen: new Date(2018, 0, 3),
+ expired: false,
+ }),
+ ]),
+ withAboutStudies(),
+ async function testStudyListing({
+ addonStudies: [addonStudy],
+ prefExperiments: [prefStudy],
+ browser,
+ }) {
+ // The content page has already loaded. Disabling the studies here shouldn't
+ // affect it, since it doesn't live-update.
+ await AddonStudies.markAsEnded(addonStudy, "disabled-automatically-test");
+ await PreferenceExperiments.stop(prefStudy.slug, {
+ resetValue: false,
+ reason: "disabled-automatically-test",
+ });
+
+ await SpecialPowers.spawn(
+ browser,
+ [{ addonStudy, prefStudy }],
+ async ({ addonStudy, prefStudy }) => {
+ const doc = content.document;
+
+ function getStudyRow(docElem, slug) {
+ return docElem.querySelector(`.study[data-study-slug="${slug}"]`);
+ }
+
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".remove-button").length == 2
+ );
+ let activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ let inactiveNames = Array.from(
+ doc.querySelectorAll(".inactive-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [addonStudy.slug, prefStudy.slug],
+ "Both studies should be listed as active, even though they have been disabled outside of the page"
+ );
+ Assert.deepEqual(
+ inactiveNames,
+ [],
+ "No studies should be listed as inactive"
+ );
+
+ const activeAddonStudy = getStudyRow(doc, addonStudy.slug);
+ const activePrefStudy = getStudyRow(doc, prefStudy.slug);
+
+ activeAddonStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, addonStudy.slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, addonStudy.slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI to show that the study has been disabled."
+ );
+
+ activePrefStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, prefStudy.slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, prefStudy.slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI to show that the study has been disabled."
+ );
+
+ activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [],
+ "No studies should be listed as active"
+ );
+ }
+ );
+ }
+);
+
+// Test that clicking remove on a study updates even about:studies pages
+// that are not currently in focus.
+decorate_task(
+ AddonStudies.withStudies([
+ addonStudyFactory({
+ slug: "fake-addon-study",
+ userFacingName: "Fake Add-on Study",
+ active: true,
+ userFacingDescription: "A fake description",
+ studyStartDate: new Date(2018, 0, 4),
+ }),
+ ]),
+ PreferenceExperiments.withMockExperiments([
+ preferenceStudyFactory({
+ slug: "fake-pref-study",
+ userFacingName: "Fake Preference Study",
+ lastSeen: new Date(2018, 0, 3),
+ expired: false,
+ }),
+ ]),
+ withAboutStudies(),
+ async function testOtherTabsUpdated({
+ addonStudies: [addonStudy],
+ prefExperiments: [prefStudy],
+ browser,
+ }) {
+ // Ensure that both our studies are active in the current tab.
+ await SpecialPowers.spawn(
+ browser,
+ [{ addonStudy, prefStudy }],
+ async ({ addonStudy, prefStudy }) => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".remove-button").length == 2,
+ "waiting for page to load"
+ );
+ let activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ let inactiveNames = Array.from(
+ doc.querySelectorAll(".inactive-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [addonStudy.slug, prefStudy.slug],
+ "Both studies should be listed as active"
+ );
+ Assert.deepEqual(
+ inactiveNames,
+ [],
+ "No studies should be listed as inactive"
+ );
+ }
+ );
+
+ // Open a new about:studies tab.
+ await BrowserTestUtils.withNewTab("about:studies", async browser => {
+ // Delete both studies in this tab; this should pass if previous tests have passed.
+ await SpecialPowers.spawn(
+ browser,
+ [{ addonStudy, prefStudy }],
+ async ({ addonStudy, prefStudy }) => {
+ const doc = content.document;
+
+ function getStudyRow(docElem, slug) {
+ return docElem.querySelector(`.study[data-study-slug="${slug}"]`);
+ }
+
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".remove-button").length == 2,
+ "waiting for page to load"
+ );
+ let activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ let inactiveNames = Array.from(
+ doc.querySelectorAll(".inactive-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [addonStudy.slug, prefStudy.slug],
+ "Both studies should be listed as active in the new tab"
+ );
+ Assert.deepEqual(
+ inactiveNames,
+ [],
+ "No studies should be listed as inactive in the new tab"
+ );
+
+ const activeAddonStudy = getStudyRow(doc, addonStudy.slug);
+ const activePrefStudy = getStudyRow(doc, prefStudy.slug);
+
+ activeAddonStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, addonStudy.slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, addonStudy.slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI in the new tab"
+ );
+
+ activePrefStudy.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(() =>
+ getStudyRow(doc, prefStudy.slug).matches(".study.disabled")
+ );
+ ok(
+ getStudyRow(doc, prefStudy.slug).matches(".study.disabled"),
+ "Clicking the remove button updates the UI in the new tab"
+ );
+
+ activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+
+ Assert.deepEqual(
+ activeNames,
+ [],
+ "No studies should be listed as active"
+ );
+ }
+ );
+ });
+
+ // Ensure that the original tab has updated correctly.
+ await SpecialPowers.spawn(
+ browser,
+ [{ addonStudy, prefStudy }],
+ async ({ addonStudy, prefStudy }) => {
+ const doc = content.document;
+ await ContentTaskUtils.waitForCondition(
+ () => doc.querySelectorAll(".inactive-study-list .study").length == 2,
+ "Two studies should load into the inactive list, since they were disabled in a different tab"
+ );
+ let activeNames = Array.from(
+ doc.querySelectorAll(".active-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ let inactiveNames = Array.from(
+ doc.querySelectorAll(".inactive-study-list .study")
+ ).map(row => row.dataset.studySlug);
+ Assert.deepEqual(
+ activeNames,
+ [],
+ "No studies should be listed as active, since they were disabled in a different tab"
+ );
+ Assert.deepEqual(
+ inactiveNames,
+ [addonStudy.slug, prefStudy.slug],
+ "Both studies should be listed as inactive, since they were disabled in a different tab"
+ );
+ }
+ );
+ }
+);
+
+add_task(async function test_nimbus_about_studies_experiment() {
+ const recipe = ExperimentFakes.recipe("about-studies-foo");
+ await ExperimentManager.enroll(recipe);
+ await BrowserTestUtils.withNewTab(
+ { gBrowser, url: "about:studies" },
+ async browser => {
+ const name = await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".nimbus .remove-button"),
+ "waiting for page/experiment to load"
+ );
+ return content.document.querySelector(".study-name").innerText;
+ });
+ // Make sure strings are properly shown
+ Assert.equal(
+ name,
+ recipe.userFacingName,
+ "Correct active experiment name"
+ );
+ }
+ );
+ ExperimentManager.unenroll(recipe.slug);
+ await BrowserTestUtils.withNewTab(
+ { gBrowser, url: "about:studies" },
+ async browser => {
+ const name = await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".nimbus.disabled"),
+ "waiting for experiment to become disabled"
+ );
+ return content.document.querySelector(".study-name").innerText;
+ });
+ // Make sure strings are properly shown
+ Assert.equal(
+ name,
+ recipe.userFacingName,
+ "Correct disabled experiment name"
+ );
+ }
+ );
+ // Cleanup for multiple test runs
+ ExperimentManager.store._deleteForTests(recipe.slug);
+ Assert.equal(ExperimentManager.store.getAll().length, 0, "Cleanup done");
+});
+
+add_task(async function test_nimbus_about_studies_rollout() {
+ let recipe = ExperimentFakes.recipe("test_nimbus_about_studies_rollout");
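+  // A rollout is an experiment recipe trimmed to a single branch with the
+  // isRollout flag set.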
+ let rollout = {
+ ...recipe,
+ branches: [recipe.branches[0]],
+ isRollout: true,
+ };
+ await ExperimentManager.enroll(rollout);
+ await BrowserTestUtils.withNewTab(
+ { gBrowser, url: "about:studies" },
+ async browser => {
+ const studyCount = await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector("#shield-studies-learn-more"),
+ "waiting for page/experiment to load"
+ );
+ return content.document.querySelectorAll(".study-name").length;
+ });
+ // Make sure strings are properly shown
+ Assert.equal(studyCount, 0, "Rollout not loaded in non-debug mode");
+ }
+ );
+ Services.prefs.setBoolPref("nimbus.debug", true);
+ await BrowserTestUtils.withNewTab(
+ { gBrowser, url: "about:studies" },
+ async browser => {
+ const studyName = await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".nimbus .remove-button"),
+ "waiting for page/experiment to load"
+ );
+ return content.document.querySelector(".study-header").innerText;
+ });
+ // Make sure strings are properly shown
+ Assert.ok(studyName.includes("Active"), "Rollout loaded in debug mode");
+ }
+ );
+ await BrowserTestUtils.withNewTab(
+ { gBrowser, url: "about:studies" },
+ async browser => {
+ const name = await SpecialPowers.spawn(browser, [], async () => {
+ content.document.querySelector(".remove-button").click();
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".nimbus.disabled"),
+ "waiting for experiment to become disabled"
+ );
+ return content.document.querySelector(".study-header").innerText;
+ });
+ // Make sure strings are properly shown
+ Assert.ok(name.includes("Complete"), "Rollout was removed");
+ }
+ );
+ // Cleanup for multiple test runs
+ ExperimentManager.store._deleteForTests(rollout.slug);
+ Services.prefs.clearUserPref("nimbus.debug");
+});
+
+add_task(async function test_getStudiesEnabled() {
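+  // Simulate a RecipeRunner that has not finished initializing yet;
+  // getStudiesEnabled() should only resolve once initializedPromise does.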
+ RecipeRunner.initializedPromise = PromiseUtils.defer();
+ let promise = AboutPages.aboutStudies.getStudiesEnabled();
+
+ RecipeRunner.initializedPromise.resolve();
+ let result = await promise;
+
+ Assert.equal(
+ result,
+ Services.prefs.getBoolPref("app.shield.optoutstudies.enabled"),
+ "about:studies is enabled if the pref is enabled"
+ );
+});
+
+add_task(async function test_forceEnroll() {
+ let sandbox = sinon.createSandbox();
+
+  // This simulates a successful enrollment.
+ let stub = sandbox.stub(RemoteSettingsExperimentLoader, "optInToExperiment");
+
+ await BrowserTestUtils.withNewTab(
+ {
+ gBrowser,
+ url: "about:studies?optin_collection=collection123&optin_branch=branch123&optin_slug=slug123",
+ },
+ async browser => {
+ await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".opt-in-box"),
+ "Should show the opt in message"
+ );
+
+ Assert.equal(
+ content.document
+ .querySelector(".opt-in-box")
+ .classList.contains("opt-in-error"),
+ false,
+ "should not have an error class since the enrollment was successful"
+ );
+
+ return true;
+ });
+ }
+ );
+
+ // Simulates a problem force enrolling
+ stub.rejects(new Error("Testing error"));
+ await BrowserTestUtils.withNewTab(
+ {
+ gBrowser,
+ url: "about:studies?optin_collection=collection123&optin_branch=branch123&optin_slug=slug123",
+ },
+ async browser => {
+ await SpecialPowers.spawn(browser, [], async () => {
+ await ContentTaskUtils.waitForCondition(
+ () => content.document.querySelector(".opt-in-box"),
+ "Should show the opt in message"
+ );
+
+ Assert.ok(
+ content.document
+ .querySelector(".opt-in-box")
+ .classList.contains("opt-in-error"),
+ "should have an error class since the enrollment rejected"
+ );
+
+ Assert.equal(
+ content.document.querySelector(".opt-in-box").textContent,
+ "Testing error",
+ "should render the error"
+ );
+
+ return true;
+ });
+ }
+ );
+
+ sandbox.restore();
+});
diff --git a/toolkit/components/normandy/test/browser/browser_actions_AddonRollbackAction.js b/toolkit/components/normandy/test/browser/browser_actions_AddonRollbackAction.js
new file mode 100644
index 0000000000..b6db1d1a2c
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_AddonRollbackAction.js
@@ -0,0 +1,246 @@
+"use strict";
+
+const { AddonRollbackAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/AddonRollbackAction.sys.mjs"
+);
+const { AddonRolloutAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/AddonRolloutAction.sys.mjs"
+);
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { AddonRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonRollouts.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+// Test that a simple recipe unenrolls as expected
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ async function simple_recipe_unenrollment({
+ mockNormandyApi,
+ setExperimentInactiveStub,
+ sendEventSpy,
+ }) {
+ const rolloutRecipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [rolloutRecipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: rolloutRecipe.arguments.extensionApiId,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ const rolloutAction = new AddonRolloutAction();
+ await rolloutAction.processRecipe(
+ rolloutRecipe,
+ BaseAction.suitability.FILTER_MATCH
+ );
+ is(rolloutAction.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ const rollbackRecipe = {
+ id: 2,
+ arguments: {
+ rolloutSlug: "test-rollout",
+ },
+ };
+
+ const rollbackAction = new AddonRollbackAction();
+ ok(
+ await AddonRollouts.has(rolloutRecipe.arguments.slug),
+ "Rollout should have been added"
+ );
+ await rollbackAction.processRecipe(
+ rollbackRecipe,
+ BaseAction.suitability.FILTER_MATCH
+ );
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "add-on is uninstalled");
+
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: rolloutRecipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollback should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollmentId should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", rollbackRecipe.arguments.rolloutSlug],
+ ["unenroll", "addon_rollback", rollbackRecipe.arguments.rolloutSlug],
+ ]);
+
+ Assert.deepEqual(
+ setExperimentInactiveStub.args,
+ [["test-rollout"]],
+ "the telemetry experiment should deactivated"
+ );
+ }
+);
+
+// Add-on already uninstalled
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function addon_already_uninstalled({ mockNormandyApi, sendEventSpy }) {
+ const rolloutRecipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [rolloutRecipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: rolloutRecipe.arguments.extensionApiId,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ const rolloutAction = new AddonRolloutAction();
+ await rolloutAction.processRecipe(
+ rolloutRecipe,
+ BaseAction.suitability.FILTER_MATCH
+ );
+ is(rolloutAction.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ const rollbackRecipe = {
+ id: 2,
+ arguments: {
+ rolloutSlug: "test-rollout",
+ },
+ };
+
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ await addon.uninstall();
+
+ const rollbackAction = new AddonRollbackAction();
+ await rollbackAction.processRecipe(
+ rollbackRecipe,
+ BaseAction.suitability.FILTER_MATCH
+ );
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "add-on is uninstalled");
+
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: rolloutRecipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollback should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollment ID should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", rollbackRecipe.arguments.rolloutSlug],
+ ["unenroll", "addon_rollback", rollbackRecipe.arguments.rolloutSlug],
+ ]);
+ }
+);
+
+// Already rolled back, do nothing
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function already_rolled_back({ sendEventSpy }) {
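+    // Seed the database with a rollout that has already been rolled back;
+    // processing the rollback recipe again should leave it untouched and send
+    // no telemetry events.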
+ const rollout = {
+ recipeId: 1,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ };
+ AddonRollouts.add(rollout);
+
+ const action = new AddonRollbackAction();
+ await action.processRecipe(
+ {
+ id: 2,
+ arguments: {
+ rolloutSlug: "test-rollout",
+ },
+ },
+ BaseAction.suitability.FILTER_MATCH
+ );
+
+ Assert.deepEqual(
+ await AddonRollouts.getAll(),
+ [
+ {
+ recipeId: 1,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ROLLED_BACK,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ },
+ ],
+ "Rollback should be stored in db"
+ );
+
+ sendEventSpy.assertEvents([]);
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_AddonRolloutAction.js b/toolkit/components/normandy/test/browser/browser_actions_AddonRolloutAction.js
new file mode 100644
index 0000000000..d1f2a7246e
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_AddonRolloutAction.js
@@ -0,0 +1,539 @@
+"use strict";
+
+const { AddonRolloutAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/AddonRolloutAction.sys.mjs"
+);
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { AddonRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonRollouts.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+// Test that a simple recipe enrolls as expected
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withSendEventSpy(),
+ async function simple_recipe_enrollment({
+ mockNormandyApi,
+ setExperimentActiveStub,
+ sendEventSpy,
+ }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ const action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+
+ // rollout was stored
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollmentId should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", recipe.arguments.slug],
+ ]);
+ ok(
+ setExperimentActiveStub.calledWithExactly("test-rollout", "active", {
+ type: "normandy-addonrollout",
+ }),
+ "a telemetry experiment should be activated"
+ );
+
+ // cleanup installed addon
+ await addon.uninstall();
+ }
+);
+
+// Test that a rollout can update the addon
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function update_rollout({ mockNormandyApi, sendEventSpy }) {
+ // first enrollment
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ }),
+ 2: extensionDetailsFactory({
+ id: 2,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url,
+ version: "2.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ }),
+ };
+
+ let webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ let action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // update existing enrollment
+ recipe.arguments.extensionApiId = 2;
+ webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+ action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should still be installed");
+ is(addon.version, "2.0", "addon should be the correct version");
+
+ // rollout in the DB has been updated
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 2,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "2.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollmentId should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", "test-rollout"],
+ ["update", "addon_rollout", "test-rollout"],
+ ]);
+
+ // Cleanup
+ await addon.uninstall();
+ }
+);
+
+// Re-running a recipe does nothing
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function rerun_recipe({ mockNormandyApi, sendEventSpy }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ let action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // re-run the same recipe
+ action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should still be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // rollout in the DB has not been updated
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "Enrollment ID should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([["enroll", "addon_rollout", "test-rollout"]]);
+
+ // Cleanup
+ await addon.uninstall();
+ }
+);
+
+// Conflicting rollouts
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function conflicting_rollout({ mockNormandyApi, sendEventSpy }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ let action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // update existing enrollment
+ action = new AddonRolloutAction();
+ await action.processRecipe(
+ {
+ ...recipe,
+ id: 2,
+ arguments: {
+ ...recipe.arguments,
+ slug: "test-conflict",
+ },
+ },
+ BaseAction.suitability.FILTER_MATCH
+ );
+ is(action.lastError, null, "lastError should be null");
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should still be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // rollout in the DB has not been updated
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+    ok(
+      NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+      "enrollmentId should be a UUID"
+    );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "addon_rollout",
+ "test-rollout",
+ { addonId: FIXTURE_ADDON_ID, enrollmentId: rollouts[0].enrollmentId },
+ ],
+ [
+ "enrollFailed",
+ "addon_rollout",
+ "test-conflict",
+ { enrollmentId: rollouts[0].enrollmentId, reason: "conflict" },
+ ],
+ ]);
+
+ // Cleanup
+ await addon.uninstall();
+ }
+);
+
+// Add-on ID changed
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function enroll_failed_addon_id_changed({
+ mockNormandyApi,
+ sendEventSpy,
+ }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ }),
+ 2: extensionDetailsFactory({
+ id: 2,
+ extension_id: "normandydriver-b@example.com",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].url,
+ version: "1.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].hash,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ let action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // update existing enrollment
+ recipe.arguments.extensionApiId = 2;
+ action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should still be installed");
+ is(addon.version, "1.0", "addon should be the correct version");
+
+ // rollout in the DB has not been updated
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "1.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollment ID should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", "test-rollout"],
+ [
+ "updateFailed",
+ "addon_rollout",
+ "test-rollout",
+ { reason: "addon-id-changed" },
+ ],
+ ]);
+
+ // Cleanup
+ await addon.uninstall();
+ }
+);
+
+// Add-on upgrade required
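+// Updating an existing rollout to an extension that is not an upgrade of the
+// installed version should be rejected with an updateFailed event
+// (reason "upgrade-required"), leaving the add-on and rollout unchanged.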
+decorate_task(
+ AddonRollouts.withTestMock(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ async function enroll_failed_upgrade_required({
+ mockNormandyApi,
+ sendEventSpy,
+ }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ extensionApiId: 1,
+ },
+ };
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url,
+ version: "2.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ }),
+ 2: extensionDetailsFactory({
+ id: 2,
+ }),
+ };
+
+ const webExtStartupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(FIXTURE_ADDON_ID);
+
+ let action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ await webExtStartupPromise;
+
+ // addon was installed
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should be installed");
+ is(addon.version, "2.0", "addon should be the correct version");
+
+ // update existing enrollment
+ recipe.arguments.extensionApiId = 2;
+ action = new AddonRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.id, FIXTURE_ADDON_ID, "addon should still be installed");
+ is(addon.version, "2.0", "addon should be the correct version");
+
+ // rollout in the DB has not been updated
+ const rollouts = await AddonRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ recipeId: recipe.id,
+ slug: "test-rollout",
+ state: AddonRollouts.STATE_ACTIVE,
+ extensionApiId: 1,
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "2.0",
+ xpiUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url,
+ xpiHash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ xpiHashAlgorithm: "sha256",
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "enrollment ID should be a UUID"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "addon_rollout", "test-rollout"],
+ [
+ "updateFailed",
+ "addon_rollout",
+ "test-rollout",
+ { reason: "upgrade-required" },
+ ],
+ ]);
+
+ // Cleanup
+ await addon.uninstall();
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_BranchedAddonStudyAction.js b/toolkit/components/normandy/test/browser/browser_actions_BranchedAddonStudyAction.js
new file mode 100644
index 0000000000..5a3e959be9
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_BranchedAddonStudyAction.js
@@ -0,0 +1,1662 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { BranchedAddonStudyAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BranchedAddonStudyAction.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+const { branchedAddonStudyFactory } = NormandyTestUtils.factories;
+
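+// Helper that builds a "branched-addon-study" recipe with sensible defaults.
+// Any `arguments` overrides are merged into the default arguments before the
+// recipe is constructed; other overrides apply to the recipe itself.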
+function branchedAddonStudyRecipeFactory(overrides = {}) {
+ let args = {
+ slug: "fake-slug",
+ userFacingName: "Fake name",
+ userFacingDescription: "fake description",
+ isEnrollmentPaused: false,
+ branches: [
+ {
+ slug: "a",
+ ratio: 1,
+ extensionApiId: 1,
+ },
+ ],
+ };
+ if (Object.hasOwnProperty.call(overrides, "arguments")) {
+ args = Object.assign(args, overrides.arguments);
+ delete overrides.arguments;
+ }
+ return recipeFactory(
+ Object.assign(
+ {
+ action: "branched-addon-study",
+ arguments: args,
+ },
+ overrides
+ )
+ );
+}
+
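+// Helper that builds a recipe whose id and arguments match an existing study
+// fixture, so that processing it is treated as an update to the enrolled
+// study rather than a fresh enrollment.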
+function recipeFromStudy(study, overrides = {}) {
+ let args = {
+ slug: study.slug,
+ userFacingName: study.userFacingName,
+ isEnrollmentPaused: false,
+ branches: [
+ {
+ slug: "a",
+ ratio: 1,
+ extensionApiId: study.extensionApiId,
+ },
+ ],
+ };
+
+ if (Object.hasOwnProperty.call(overrides, "arguments")) {
+ args = Object.assign(args, overrides.arguments);
+ delete overrides.arguments;
+ }
+
+ return branchedAddonStudyRecipeFactory(
+ Object.assign(
+ {
+ id: study.recipeId,
+ arguments: args,
+ },
+ overrides
+ )
+ );
+}
+
+// Test that enroll is not called if the client is already enrolled in the study,
+// and that update does nothing if the recipe is unchanged
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([branchedAddonStudyFactory()]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "1.0" }),
+ async function enrollTwiceFail({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ hash: study.extensionHash,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(!enrollSpy.called, "enroll should not be called");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([]);
+ }
+);
+
+// Test that if the add-on fails to install, the database is cleaned up and the
+// error is correctly reported.
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ AddonStudies.withStudies(),
+ async function enrollDownloadFail({ mockNormandyApi, sendEventSpy }) {
+ const recipe = branchedAddonStudyRecipeFactory({
+ arguments: {
+ branches: [{ slug: "missing", ratio: 1, extensionApiId: 404 }],
+ },
+ });
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.branches[0].extensionApiId,
+ xpi: "https://example.com/404.xpi",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ const studies = await AddonStudies.getAll();
+ Assert.deepEqual(studies, [], "the study should not be in the database");
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "download-failure",
+ detail: "ERROR_NETWORK_FAILURE",
+ branch: "missing",
+ },
+ ],
+ ]);
+ }
+);
+
+// Ensure that the database is clean and error correctly reported if hash check fails
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ AddonStudies.withStudies(),
+ async function enrollHashCheckFails({ mockNormandyApi, sendEventSpy }) {
+ const recipe = branchedAddonStudyRecipeFactory();
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.branches[0].extensionApiId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ hash: "badhash",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ const studies = await AddonStudies.getAll();
+ Assert.deepEqual(studies, [], "the study should not be in the database");
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "download-failure",
+ detail: "ERROR_INCORRECT_HASH",
+ branch: "a",
+ },
+ ],
+ ]);
+ }
+);
+
+// Ensure that the database is clean and error correctly reported if there is a metadata mismatch
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ AddonStudies.withStudies(),
+ async function enrollFailsMetadataMismatch({
+ mockNormandyApi,
+ sendEventSpy,
+ }) {
+ const recipe = branchedAddonStudyRecipeFactory();
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.branches[0].extensionApiId,
+ version: "1.5",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ const studies = await AddonStudies.getAll();
+ Assert.deepEqual(studies, [], "the study should not be in the database");
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "metadata-mismatch",
+ branch: "a",
+ },
+ ],
+ ]);
+ }
+);
+
+// Test that in the case of a study add-on conflicting with a non-study add-on, the study does not enroll
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ version: "0.1", id: FIXTURE_ADDON_ID }),
+ AddonStudies.withStudies(),
+ async function conflictingEnrollment({
+ mockNormandyApi,
+ sendEventSpy,
+ installedWebExtensionSafe: { addonId },
+ }) {
+ is(
+ addonId,
+ FIXTURE_ADDON_ID,
+ "Generated, installed add-on should have the same ID as the fixture"
+ );
+ const recipe = branchedAddonStudyRecipeFactory({
+ arguments: { slug: "conflicting" },
+ });
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ addonUrl: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ is(addon.version, "0.1", "The installed add-on should not be replaced");
+
+ Assert.deepEqual(
+ await AddonStudies.getAll(),
+ [],
+ "There should be no enrolled studies"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "addon_study",
+ recipe.arguments.slug,
+ { reason: "conflicting-addon-id" },
+ ],
+ ]);
+ }
+);
+
+// Test a successful update
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "1.0",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "1.0" }),
+ async function successfulUpdate({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const addonUrl = FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url;
+ const recipe = recipeFromStudy(study, {
+ arguments: {
+ branches: [
+ { slug: "a", extensionApiId: study.extensionApiId, ratio: 1 },
+ ],
+ },
+ });
+ const hash = FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash;
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.branches[0].extensionApiId,
+ extension_id: FIXTURE_ADDON_ID,
+ xpi: addonUrl,
+ hash,
+ version: "2.0",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(!enrollSpy.called, "enroll should not be called");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "update",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "2.0",
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(
+ updatedStudy,
+ {
+ ...study,
+ addonVersion: "2.0",
+ addonUrl,
+ extensionApiId: recipe.arguments.branches[0].extensionApiId,
+ extensionHash: hash,
+ },
+ "study data should be updated"
+ );
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "2.0", "add-on should be updated");
+ }
+);
+
+// Test update fails when addon ID does not match
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: "test@example.com",
+ extensionHash: "01d",
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "0.1",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "0.1" }),
+ async function updateFailsAddonIdMismatch({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: FIXTURE_ADDON_ID,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "addon-id-mismatch",
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "0.1", "add-on should be unchanged");
+ }
+);
+
+// Test update fails when original addon does not exist
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ extensionHash: "01d",
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "0.1",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: "test@example.com", version: "0.1" }),
+ async function updateFailsAddonDoesNotExist({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "addon-does-not-exist",
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ let addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(!addon, "new add-on should not be installed");
+
+ addon = await AddonManager.getAddonByID("test@example.com");
+ ok(addon, "old add-on should still be installed");
+ }
+);
+
+// Test update fails when download fails
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: "01d",
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "0.1",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "0.1" }),
+ async function updateDownloadFailure({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: "https://example.com/404.xpi",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ branch: "a",
+ reason: "download-failure",
+ detail: "ERROR_NETWORK_FAILURE",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "0.1", "add-on should be unchanged");
+ }
+);
+
+// Test update fails when hash check fails
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: "01d",
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "0.1",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "0.1" }),
+ async function updateFailsHashCheckFail({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ hash: "badhash",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ branch: "a",
+ reason: "download-failure",
+ detail: "ERROR_INCORRECT_HASH",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "0.1", "add-on should be unchanged");
+ }
+);
+
+// Test update fails on downgrade when study version is greater than extension version
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: "01d",
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "2.0",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "2.0" }),
+ async function upgradeFailsNoDowngrades({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ version: "1.0",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ reason: "no-downgrade",
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "2.0", "add-on should be unchanged");
+ }
+);
+
+// Test update fails when there is a version mismatch with metadata
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "1.0",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionFromURL(
+ FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url
+ ),
+ async function upgradeFailsMetadataMismatchVersion({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const recipe = recipeFromStudy(study);
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url,
+ version: "3.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "updateFailed",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ branch: "a",
+ reason: "metadata-mismatch",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const updatedStudy = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(updatedStudy, study, "study data should be unchanged");
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "1.0", "add-on should be unchanged");
+
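+    // Hash the installed XPI on disk and compare it against the hash recorded
+    // in the study to confirm the add-on file itself was not replaced.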
+ let addonSourceURI = addon.getResourceURI();
+ if (addonSourceURI instanceof Ci.nsIJARURI) {
+ addonSourceURI = addonSourceURI.JARFile;
+ }
+ const xpiFile = addonSourceURI.QueryInterface(Ci.nsIFileURL).file;
+ const installedHash = CryptoUtils.getFileHash(
+ xpiFile,
+ study.extensionHashAlgorithm
+ );
+ ok(installedHash === study.extensionHash, "add-on should be unchanged");
+ }
+);
+
+// Test that unenrolling fails if the study doesn't exist
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ AddonStudies.withStudies(),
+ async function unenrollNonexistent() {
+ const action = new BranchedAddonStudyAction();
+ await Assert.rejects(
+ action.unenroll(42),
+ /no study found/i,
+ "unenroll should fail when no study exists"
+ );
+ }
+);
+
+// Test that unenrolling an inactive study fails
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ AddonStudies.withStudies([branchedAddonStudyFactory({ active: false })]),
+ withSendEventSpy(),
+ async ({ addonStudies: [study], sendEventSpy }) => {
+ const action = new BranchedAddonStudyAction();
+ await Assert.rejects(
+ action.unenroll(study.recipeId),
+ /cannot stop study.*already inactive/i,
+ "unenroll should fail when the requested study is inactive"
+ );
+ }
+);
+
+// test a successful unenrollment
+const testStopId = "testStop@example.com";
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ active: true,
+ addonId: testStopId,
+ studyEndDate: null,
+ }),
+ ]),
+ withInstalledWebExtension({ id: testStopId }, { expectUninstall: true }),
+ withSendEventSpy(),
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ async function unenrollTest({
+ addonStudies: [study],
+ installedWebExtension: { addonId },
+ sendEventSpy,
+ setExperimentInactiveStub,
+ }) {
+ let addon = await AddonManager.getAddonByID(addonId);
+ ok(addon, "the add-on should be installed before unenrolling");
+
+ const action = new BranchedAddonStudyAction();
+ await action.unenroll(study.recipeId, "test-reason");
+
+    const newStudy = await AddonStudies.get(study.recipeId);
+    ok(!newStudy.active, "stop should mark the study as inactive");
+    ok(newStudy.studyEndDate !== null, "the study should have an end date");
+
+ addon = await AddonManager.getAddonByID(addonId);
+ is(addon, null, "the add-on should be uninstalled after unenrolling");
+
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "addon_study",
+ study.name,
+ {
+ addonId,
+ addonVersion: study.addonVersion,
+ reason: "test-reason",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ Assert.deepEqual(
+ setExperimentInactiveStub.args,
+ [[study.slug]],
+ "setExperimentInactive should be called"
+ );
+ }
+);
+
+// If the add-on for a study isn't installed, a warning should be logged, but the action is still successful
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ active: true,
+ addonId: "missingAddon@example.com",
+ }),
+ ]),
+ withSendEventSpy(),
+ async function unenrollMissingAddonTest({
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const action = new BranchedAddonStudyAction();
+
+ await action.unenroll(study.recipeId);
+
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "addon_study",
+ study.name,
+ {
+ addonId: study.addonId,
+ addonVersion: study.addonVersion,
+ reason: "unknown",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ SimpleTest.endMonitorConsole();
+ }
+);
+
+// Test that the action respects the study opt-out
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ withMockPreferences(),
+ AddonStudies.withStudies(),
+ async function testOptOut({
+ mockNormandyApi,
+ sendEventSpy,
+ mockPreferences,
+ }) {
+ mockPreferences.set("app.shield.optoutstudies.enabled", false);
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const recipe = branchedAddonStudyRecipeFactory();
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: extensionDetailsFactory({
+ id: recipe.arguments.branches[0].extensionApiId,
+ }),
+ };
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(
+ action.state,
+ BranchedAddonStudyAction.STATE_DISABLED,
+ "the action should be disabled"
+ );
+ await action.finalize();
+ is(
+ action.state,
+ BranchedAddonStudyAction.STATE_FINALIZED,
+ "the action should be finalized"
+ );
+ is(action.lastError, null, "lastError should be null");
+ Assert.deepEqual(enrollSpy.args, [], "enroll should not be called");
+ sendEventSpy.assertEvents([]);
+ }
+);
+
+// Test that the action does not enroll paused recipes
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ AddonStudies.withStudies(),
+ async function testEnrollmentPaused({ mockNormandyApi, sendEventSpy }) {
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const updateSpy = sinon.spy(action, "update");
+ const recipe = branchedAddonStudyRecipeFactory({
+ arguments: { isEnrollmentPaused: true },
+ });
+ const extensionDetails = extensionDetailsFactory({
+ id: recipe.arguments.extensionApiId,
+ });
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.extensionApiId]: extensionDetails,
+ };
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ const addon = await AddonManager.getAddonByID(
+ extensionDetails.extension_id
+ );
+ is(addon, null, "the add-on should not have been installed");
+ await action.finalize();
+ ok(!updateSpy.called, "update should not be called");
+ ok(enrollSpy.called, "enroll should be called");
+ sendEventSpy.assertEvents([]);
+ }
+);
+
+// Test that the action updates paused recipes
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ addonId: FIXTURE_ADDON_ID,
+ extensionHash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ extensionHashAlgorithm: "sha256",
+ addonVersion: "1.0",
+ }),
+ ]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe({ id: FIXTURE_ADDON_ID, version: "1.0" }),
+ async function testUpdateEnrollmentPaused({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ }) {
+ const addonUrl = FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].url;
+ const recipe = recipeFromStudy(study, {
+ arguments: { isEnrollmentPaused: true },
+ });
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ xpi: addonUrl,
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-2.0"].hash,
+ version: "2.0",
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ ok(!enrollSpy.called, "enroll should not be called");
+ ok(updateSpy.called, "update should be called");
+ sendEventSpy.assertEvents([
+ [
+ "update",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ addonId: FIXTURE_ADDON_ID,
+ addonVersion: "2.0",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+ ok(addon.version === "2.0", "add-on should be updated");
+ }
+);
+
+// Test that unenroll is called if the study is no longer sent by the server
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ AddonStudies.withStudies([branchedAddonStudyFactory()]),
+ async function unenroll({ addonStudies: [study] }) {
+ const action = new BranchedAddonStudyAction();
+ const unenrollSpy = sinon.stub(action, "unenroll");
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+ Assert.deepEqual(
+ unenrollSpy.args,
+ [[study.recipeId, "recipe-not-seen"]],
+ "unenroll should be called"
+ );
+ }
+);
+
+// A test function that will be parameterized over the argument "branch" below.
+// Mocks the branch selector, and then tests that the user correctly gets enrolled in that branch.
+const successEnrollBranchedTest = decorate(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ AddonStudies.withStudies(),
+ async function ({
+ branch,
+ mockNormandyApi,
+ sendEventSpy,
+ setExperimentActiveStub,
+ }) {
+ ok(branch == "a" || branch == "b", "Branch should be either a or b");
+ const initialAddonIds = (await AddonManager.getAllAddons()).map(
+ addon => addon.id
+ );
+ const addonId = `normandydriver-${branch}@example.com`;
+    const otherBranchAddonId = `normandydriver-${
+      branch == "a" ? "b" : "a"
+    }@example.com`;
+ is(
+ await AddonManager.getAddonByID(addonId),
+ null,
+ "The add-on should not be installed at the beginning of the test"
+ );
+ is(
+ await AddonManager.getAddonByID(otherBranchAddonId),
+ null,
+ "The other branch's add-on should not be installed at the beginning of the test"
+ );
+
+ const recipe = branchedAddonStudyRecipeFactory({
+ arguments: {
+ slug: "success",
+ branches: [
+ { slug: "a", ratio: 1, extensionApiId: 1 },
+ { slug: "b", ratio: 1, extensionApiId: 2 },
+ ],
+ },
+ });
+ mockNormandyApi.extensionDetails = {
+ [recipe.arguments.branches[0].extensionApiId]: {
+ id: recipe.arguments.branches[0].extensionApiId,
+ name: "Normandy Fixture A",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ extension_id: "normandydriver-a@example.com",
+ version: "1.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ hash_algorithm: "sha256",
+ },
+ [recipe.arguments.branches[1].extensionApiId]: {
+ id: recipe.arguments.branches[1].extensionApiId,
+ name: "Normandy Fixture B",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].url,
+ extension_id: "normandydriver-b@example.com",
+ version: "1.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-b-1.0"].hash,
+ hash_algorithm: "sha256",
+ },
+ };
+ const extensionApiId =
+ recipe.arguments.branches[branch == "a" ? 0 : 1].extensionApiId;
+ const extensionDetails = mockNormandyApi.extensionDetails[extensionApiId];
+
+ const action = new BranchedAddonStudyAction();
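+    // Stub the branch chooser so the test deterministically enrolls in the
+    // branch under test instead of relying on sampling.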
+ const chooseBranchStub = sinon.stub(action, "chooseBranch");
+ chooseBranchStub.callsFake(async ({ branches }) =>
+ branches.find(b => b.slug === branch)
+ );
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ const study = await AddonStudies.get(recipe.id);
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "addon_study",
+ recipe.arguments.slug,
+ {
+ addonId,
+ addonVersion: "1.0",
+ branch,
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [
+ [
+ recipe.arguments.slug,
+ branch,
+ { type: "normandy-addonstudy", enrollmentId: study.enrollmentId },
+ ],
+ ],
+ "setExperimentActive should be called"
+ );
+
+ const addon = await AddonManager.getAddonByID(addonId);
+ ok(addon, "The chosen branch's add-on should be installed");
+ is(
+ await AddonManager.getAddonByID(otherBranchAddonId),
+ null,
+ "The other branch's add-on should not be installed"
+ );
+
+ Assert.deepEqual(
+ study,
+ {
+ recipeId: recipe.id,
+ slug: recipe.arguments.slug,
+ userFacingName: recipe.arguments.userFacingName,
+ userFacingDescription: recipe.arguments.userFacingDescription,
+ addonId,
+ addonVersion: "1.0",
+ addonUrl: FIXTURE_ADDON_DETAILS[`normandydriver-${branch}-1.0`].url,
+ active: true,
+ branch,
+ studyStartDate: study.studyStartDate, // This is checked below
+ studyEndDate: null,
+ extensionApiId: extensionDetails.id,
+ extensionHash: extensionDetails.hash,
+ extensionHashAlgorithm: extensionDetails.hash_algorithm,
+ enrollmentId: study.enrollmentId,
+ temporaryErrorDeadline: null,
+ },
+ "the correct study data should be stored"
+ );
+
+ // cleanup
+ await safeUninstallAddon(addon);
+ Assert.deepEqual(
+ (await AddonManager.getAllAddons()).map(addon => addon.id),
+ initialAddonIds,
+ "all test add-ons are removed"
+ );
+ }
+);
+
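+// Run the parameterized enrollment test once for each branch.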
+add_task(args => successEnrollBranchedTest({ ...args, branch: "a" }));
+add_task(args => successEnrollBranchedTest({ ...args, branch: "b" }));
+
+// If the enrolled branch no longer exists, unenroll
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ AddonStudies.withStudies([branchedAddonStudyFactory()]),
+ withSendEventSpy(),
+ withInstalledWebExtensionSafe(
+ { id: FIXTURE_ADDON_ID, version: "1.0" },
+ { expectUninstall: true }
+ ),
+ async function unenrollIfBranchDisappears({
+ mockNormandyApi,
+ addonStudies: [study],
+ sendEventSpy,
+ installedWebExtensionSafe: { addonId },
+ }) {
+ const recipe = recipeFromStudy(study, {
+ arguments: {
+ branches: [
+ {
+ slug: "b", // different from enrolled study
+ ratio: 1,
+ extensionApiId: study.extensionApiId,
+ },
+ ],
+ },
+ });
+ mockNormandyApi.extensionDetails = {
+ [study.extensionApiId]: extensionDetailsFactory({
+ id: study.extensionApiId,
+ extension_id: study.addonId,
+ hash: study.extensionHash,
+ }),
+ };
+ const action = new BranchedAddonStudyAction();
+ const enrollSpy = sinon.spy(action, "enroll");
+ const unenrollSpy = sinon.spy(action, "unenroll");
+ const updateSpy = sinon.spy(action, "update");
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+
+ ok(!enrollSpy.called, "Enroll should not be called");
+ ok(updateSpy.called, "Update should be called");
+ ok(unenrollSpy.called, "Unenroll should be called");
+
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "addon_study",
+ study.name,
+ {
+ addonId,
+ addonVersion: study.addonVersion,
+ reason: "branch-removed",
+ branch: "a", // the original study branch
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ is(
+ await AddonManager.getAddonByID(addonId),
+ null,
+ "the add-on should be uninstalled"
+ );
+
+ const storedStudy = await AddonStudies.get(recipe.id);
+ ok(!storedStudy.active, "Study should be inactive");
+ ok(storedStudy.branch == "a", "Study's branch should not change");
+ ok(storedStudy.studyEndDate, "Study's end date should be set");
+ }
+);
+
+// Test that branches without an add-on can be enrolled and unenrolled successfully.
+decorate_task(
+ withStudiesEnabled(),
+ ensureAddonCleanup(),
+ withMockNormandyApi(),
+ withSendEventSpy(),
+ AddonStudies.withStudies(),
+ async function noAddonBranches({ sendEventSpy }) {
+ const initialAddonIds = (await AddonManager.getAllAddons()).map(
+ addon => addon.id
+ );
+
+ const recipe = branchedAddonStudyRecipeFactory({
+ arguments: {
+ slug: "no-op-branch",
+ branches: [{ slug: "a", ratio: 1, extensionApiId: null }],
+ },
+ });
+
+ let action = new BranchedAddonStudyAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ let study = await AddonStudies.get(recipe.id);
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ addonId: AddonStudies.NO_ADDON_MARKER,
+ addonVersion: AddonStudies.NO_ADDON_MARKER,
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ Assert.deepEqual(
+ (await AddonManager.getAllAddons()).map(addon => addon.id),
+ initialAddonIds,
+ "No add-on should be installed for the study"
+ );
+
+ Assert.deepEqual(
+ study,
+ {
+ recipeId: recipe.id,
+ slug: recipe.arguments.slug,
+ userFacingName: recipe.arguments.userFacingName,
+ userFacingDescription: recipe.arguments.userFacingDescription,
+ addonId: null,
+ addonVersion: null,
+ addonUrl: null,
+ active: true,
+ branch: "a",
+ studyStartDate: study.studyStartDate, // This is checked below
+ studyEndDate: null,
+ extensionApiId: null,
+ extensionHash: null,
+ extensionHashAlgorithm: null,
+ enrollmentId: study.enrollmentId,
+ temporaryErrorDeadline: null,
+ },
+ "the correct study data should be stored"
+ );
+ ok(study.studyStartDate, "studyStartDate should have a value");
+    ok(
+      NormandyTestUtils.isUuid(study.enrollmentId),
+      "enrollmentId should be a UUID"
+    );
+
+ // Now unenroll
+ action = new BranchedAddonStudyAction();
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ sendEventSpy.assertEvents([
+ // The event from before
+ [
+ "enroll",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ addonId: AddonStudies.NO_ADDON_MARKER,
+ addonVersion: AddonStudies.NO_ADDON_MARKER,
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ // And a new unenroll event
+ [
+ "unenroll",
+ "addon_study",
+ recipe.arguments.name,
+ {
+ addonId: AddonStudies.NO_ADDON_MARKER,
+ addonVersion: AddonStudies.NO_ADDON_MARKER,
+ branch: "a",
+ enrollmentId: study.enrollmentId,
+ },
+ ],
+ ]);
+
+ Assert.deepEqual(
+ (await AddonManager.getAllAddons()).map(addon => addon.id),
+ initialAddonIds,
+ "The set of add-ons should not change"
+ );
+
+ study = await AddonStudies.get(recipe.id);
+ Assert.deepEqual(
+ study,
+ {
+ recipeId: recipe.id,
+ slug: recipe.arguments.slug,
+ userFacingName: recipe.arguments.userFacingName,
+ userFacingDescription: recipe.arguments.userFacingDescription,
+ addonId: null,
+ addonVersion: null,
+ addonUrl: null,
+ active: false,
+ branch: "a",
+ studyStartDate: study.studyStartDate, // This is checked below
+ studyEndDate: study.studyEndDate, // This is checked below
+ extensionApiId: null,
+ extensionHash: null,
+ extensionHashAlgorithm: null,
+ enrollmentId: study.enrollmentId,
+ temporaryErrorDeadline: null,
+ },
+ "the correct study data should be stored"
+ );
+ ok(study.studyStartDate, "studyStartDate should have a value");
+ ok(study.studyEndDate, "studyEndDate should have a value");
+    ok(
+      NormandyTestUtils.isUuid(study.enrollmentId),
+      "enrollmentId should be a UUID"
+    );
+ }
+);
+
+// Check that the appropriate set of suitabilities are considered temporary errors
+decorate_task(
+ withStudiesEnabled(),
+ async function test_temporary_errors_set_deadline() {
+ let suitabilities = [
+ {
+ suitability: BaseAction.suitability.SIGNATURE_ERROR,
+ isTemporaryError: true,
+ },
+ {
+ suitability: BaseAction.suitability.CAPABILITIES_MISMATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_MATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_MISMATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_ERROR,
+ isTemporaryError: true,
+ },
+ {
+ suitability: BaseAction.suitability.ARGUMENTS_INVALID,
+ isTemporaryError: false,
+ },
+ ];
+
+ Assert.deepEqual(
+ suitabilities.map(({ suitability }) => suitability).sort(),
+ Array.from(Object.values(BaseAction.suitability)).sort(),
+ "This test covers all suitabilities"
+ );
+
+ // The action should set a deadline 1 week from now. To avoid intermittent
+ // failures, give this a generous bound of 2 hours on either side.
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let expectedDeadline = now + 7 * 24 * hour;
+ let minDeadline = new Date(expectedDeadline - 2 * hour);
+ let maxDeadline = new Date(expectedDeadline + 2 * hour);
+
+    // For each suitability, build a decorator that sets up a suitability
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const { suitability, isTemporaryError } of suitabilities) {
+ const decorator = AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ slug: `test-for-suitability-${suitability}`,
+ }),
+ ]);
+ await decorator(async ({ addonStudies: [study] }) => {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, suitability);
+ let modifiedStudy = await AddonStudies.get(recipe.id);
+
+ if (isTemporaryError) {
+ ok(
+ // The constructor of this object is a Date, but is not the same as
+ // the Date that we have in our global scope, because it got sent
+ // through IndexedDB. Check the name of the constructor instead.
+ modifiedStudy.temporaryErrorDeadline.constructor.name == "Date",
+ `A temporary failure deadline should be set as a date for suitability ${suitability}`
+ );
+ let deadline = modifiedStudy.temporaryErrorDeadline;
+ ok(
+ deadline >= minDeadline && deadline <= maxDeadline,
+ `The temporary failure deadline should be in the expected range for ` +
+ `suitability ${suitability} (got ${deadline}, expected between ${minDeadline} and ${maxDeadline})`
+ );
+ } else {
+ ok(
+ !modifiedStudy.temporaryErrorDeadline,
+ `No temporary failure deadline should be set for suitability ${suitability}`
+ );
+ }
+ })();
+ }
+ }
+);
+
+// Check that if there is an existing deadline, temporary errors don't overwrite it
+decorate_task(
+ withStudiesEnabled(),
+ async function test_temporary_errors_dont_overwrite_deadline() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // A deadline two hours in the future won't be hit during the test.
+ let now = Date.now();
+    let twoHours = 2 * 60 * 60 * 1000;
+    let unhitDeadline = new Date(now + twoHours);
+
+    // For each suitability, build a decorator that sets up a suitability
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ slug: `test-for-suitability-${suitability}`,
+ active: true,
+ temporaryErrorDeadline: unhitDeadline,
+ }),
+ ]);
+ await decorator(async ({ addonStudies: [study] }) => {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, suitability);
+ let modifiedStudy = await AddonStudies.get(recipe.id);
+ is(
+ modifiedStudy.temporaryErrorDeadline.toJSON(),
+ unhitDeadline.toJSON(),
+ `The temporary failure deadline should not be cleared for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that if the deadline is past, temporary errors end the study.
+decorate_task(
+ withStudiesEnabled(),
+ async function test_temporary_errors_hit_deadline() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // Set a deadline of two hours in the past, so that the deadline is triggered.
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let hitDeadline = new Date(now - 2 * hour);
+
+    // For each suitability, build a decorator that sets up a suitability
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ slug: `test-for-suitability-${suitability}`,
+ active: true,
+ temporaryErrorDeadline: hitDeadline,
+ }),
+ ]);
+ await decorator(async ({ addonStudies: [study] }) => {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, suitability);
+ let modifiedStudy = await AddonStudies.get(recipe.id);
+ ok(
+ !modifiedStudy.active,
+ `The study should end for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that non-temporary-error suitabilities clear the temporary deadline
+decorate_task(
+ withStudiesEnabled(),
+ async function test_non_temporary_error_clears_temporary_error_deadline() {
+ let suitabilitiesThatShouldClearDeadline = [
+ BaseAction.suitability.CAPABILITIES_MISMATCH,
+ BaseAction.suitability.FILTER_MATCH,
+ BaseAction.suitability.FILTER_MISMATCH,
+ BaseAction.suitability.ARGUMENTS_INVALID,
+ ];
+
+ // Use a deadline in the past to demonstrate that even if the deadline has
+ // passed, only a temporary error suitability ends the study.
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let hitDeadline = new Date(now - 2 * hour);
+
+    // For each suitability, build a decorator that sets up a suitability
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of suitabilitiesThatShouldClearDeadline) {
+ const decorator = AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ slug: `test-for-suitability-${suitability}`.toLocaleLowerCase(),
+ active: true,
+ temporaryErrorDeadline: hitDeadline,
+ }),
+ ]);
+ await decorator(async ({ addonStudies: [study] }) => {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, suitability);
+ let modifiedStudy = await AddonStudies.get(recipe.id);
+ ok(
+ !modifiedStudy.temporaryErrorDeadline,
+          `The temporary failure deadline should be cleared for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that invalid deadlines are reset
+decorate_task(
+ withStudiesEnabled(),
+  async function test_invalid_temporary_error_deadline_is_reset() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // The action should set a deadline 1 week from now. To avoid intermittent
+ // failures, give this a generous bound of 2 hours on either side.
+ let invalidDeadline = new Date("not a valid date");
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let expectedDeadline = now + 7 * 24 * hour;
+ let minDeadline = new Date(expectedDeadline - 2 * hour);
+ let maxDeadline = new Date(expectedDeadline + 2 * hour);
+
+    // For each suitability, build a decorator that sets up a suitability
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ slug: `test-for-suitability-${suitability}`.toLocaleLowerCase(),
+ active: true,
+ temporaryErrorDeadline: invalidDeadline,
+ }),
+ ]);
+ await decorator(async ({ addonStudies: [study] }) => {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, suitability);
+ is(action.lastError, null, "No errors should be reported");
+ let modifiedStudy = await AddonStudies.get(recipe.id);
+ ok(
+ modifiedStudy.temporaryErrorDeadline != invalidDeadline,
+          `The temporary failure deadline should be reset for suitability ${suitability}`
+ );
+ let deadline = new Date(modifiedStudy.temporaryErrorDeadline);
+ ok(
+ deadline >= minDeadline && deadline <= maxDeadline,
+ `The temporary failure deadline should be reset to a valid deadline for ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that an already unenrolled study doesn't try to unenroll again if
+// the recipe doesn't apply to the client anymore.
+decorate_task(
+ withStudiesEnabled(),
+ async function test_unenroll_when_already_expired() {
+ // Use a deadline that is already past
+ const now = new Date();
+ const hour = 1000 * 60 * 60;
+ const temporaryErrorDeadline = new Date(now - hour * 2).toJSON();
+
+ const suitabilitiesToCheck = Object.values(BaseAction.suitability);
+
+ const subtest = decorate(
+ AddonStudies.withStudies([
+ branchedAddonStudyFactory({
+ active: false,
+ temporaryErrorDeadline,
+ }),
+ ]),
+
+ async ({ addonStudies: [study], suitability }) => {
+ const recipe = recipeFromStudy(study);
+ const action = new BranchedAddonStudyAction();
+        const unenrollSpy = sinon.spy(action, "unenroll");
+ await action.processRecipe(recipe, suitability);
+ Assert.deepEqual(
+ unenrollSpy.args,
+ [],
+ `Stop should not be called for ${suitability}`
+ );
+ }
+ );
+
+ for (const suitability of suitabilitiesToCheck) {
+ await subtest({ suitability });
+ }
+ }
+);
+
+// If no recipes are received, it should be considered a temporary error
+decorate_task(
+ withStudiesEnabled(),
+ AddonStudies.withStudies([branchedAddonStudyFactory({ active: true })]),
+ withSpy(BranchedAddonStudyAction.prototype, "unenroll"),
+ withStub(BranchedAddonStudyAction.prototype, "_considerTemporaryError"),
+ async function testNoRecipes({
+ unenrollSpy,
+ _considerTemporaryErrorStub,
+ addonStudies: [study],
+ }) {
+ let action = new BranchedAddonStudyAction();
+ await action.finalize({ noRecipes: true });
+
+ Assert.deepEqual(unenrollSpy.args, [], "Unenroll should not be called");
+ Assert.deepEqual(
+ _considerTemporaryErrorStub.args,
+ [[{ study, reason: "no-recipes" }]],
+ "The experiment should accumulate a temporary error"
+ );
+ }
+);
+
+// If recipes are received, but the flag that none were received is set, an error should be thrown
+decorate_task(
+ withStudiesEnabled(),
+ AddonStudies.withStudies([branchedAddonStudyFactory({ active: true })]),
+  async function testNoRecipesFlagWithRecipes({ addonStudies: [study] }) {
+ let action = new BranchedAddonStudyAction();
+ let recipe = recipeFromStudy(study);
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MISMATCH);
+ await action.finalize({ noRecipes: true });
+ ok(
+ action.lastError instanceof BranchedAddonStudyAction.BadNoRecipesArg,
+ "An error should be logged since some recipes were received"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_ConsoleLogAction.js b/toolkit/components/normandy/test/browser/browser_actions_ConsoleLogAction.js
new file mode 100644
index 0000000000..910de357f6
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_ConsoleLogAction.js
@@ -0,0 +1,62 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { ConsoleLogAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/ConsoleLogAction.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+
+// Test that logging works
+add_task(async function logging_works() {
+ const action = new ConsoleLogAction();
+ const infoStub = sinon.stub(action.log, "info");
+ try {
+ const recipe = { id: 1, arguments: { message: "Hello, world!" } };
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ Assert.deepEqual(
+ infoStub.args,
+ ["Hello, world!"],
+ "the message should be logged"
+ );
+ } finally {
+ infoStub.restore();
+ }
+});
+
+// test that argument validation works
+decorate_task(
+ withStub(Uptake, "reportRecipe"),
+ async function arguments_are_validated({ reportRecipeStub }) {
+ const action = new ConsoleLogAction();
+ const infoStub = sinon.stub(action.log, "info");
+
+ try {
+ // message is required
+ let recipe = { id: 1, arguments: {} };
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ Assert.deepEqual(infoStub.args, [], "no message should be logged");
+ Assert.deepEqual(reportRecipeStub.args, [
+ [recipe, Uptake.RECIPE_EXECUTION_ERROR],
+ ]);
+
+ reportRecipeStub.reset();
+
+ // message must be a string
+ recipe = { id: 1, arguments: { message: 1 } };
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "lastError should be null");
+ Assert.deepEqual(infoStub.args, [], "no message should be logged");
+ Assert.deepEqual(reportRecipeStub.args, [
+ [recipe, Uptake.RECIPE_EXECUTION_ERROR],
+ ]);
+ } finally {
+ infoStub.restore();
+ }
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_MessagingExperimentAction.js b/toolkit/components/normandy/test/browser/browser_actions_MessagingExperimentAction.js
new file mode 100644
index 0000000000..0f16ff1436
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_MessagingExperimentAction.js
@@ -0,0 +1,67 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+const { MessagingExperimentAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/MessagingExperimentAction.sys.mjs"
+);
+
+const { _ExperimentManager, ExperimentManager } = ChromeUtils.importESModule(
+ "resource://nimbus/lib/ExperimentManager.sys.mjs"
+);
+
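+// The action should validate the recipe arguments and forward them to the
+// Nimbus ExperimentManager with "normandy" as the enrollment source.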
+decorate_task(
+ withStudiesEnabled(),
+ withStub(Uptake, "reportRecipe"),
+ async function arguments_are_validated({ reportRecipeStub }) {
+ const action = new MessagingExperimentAction();
+
+ is(
+ action.manager,
+ ExperimentManager,
+ "should set .manager to ExperimentManager singleton"
+ );
+ // Override this for the purposes of the test
+ action.manager = new _ExperimentManager();
+ await action.manager.onStartup();
+ const onRecipeStub = sinon.spy(action.manager, "onRecipe");
+
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "foo",
+ isEnrollmentPaused: false,
+ branches: [
+ {
+ slug: "control",
+ ratio: 1,
+ groups: ["green"],
+ value: { title: "hello" },
+ },
+ {
+ slug: "variant",
+ ratio: 1,
+ groups: ["green"],
+ value: { title: "world" },
+ },
+ ],
+ },
+ };
+
+ ok(action.validateArguments(recipe.arguments), "should validate arguments");
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(reportRecipeStub.args, [[recipe, Uptake.RECIPE_SUCCESS]]);
+ Assert.deepEqual(
+ onRecipeStub.args,
+ [[recipe.arguments, "normandy"]],
+ "should call onRecipe with recipe args and 'normandy' source"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_PreferenceExperimentAction.js b/toolkit/components/normandy/test/browser/browser_actions_PreferenceExperimentAction.js
new file mode 100644
index 0000000000..ad0ec49913
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_PreferenceExperimentAction.js
@@ -0,0 +1,914 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+"use strict";
+
+const { Sampling } = ChromeUtils.importESModule(
+ "resource://gre/modules/components-utils/Sampling.sys.mjs"
+);
+
+const { ClientEnvironment } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ClientEnvironment.sys.mjs"
+);
+const { PreferenceExperiments } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceExperiments.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { PreferenceExperimentAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/PreferenceExperimentAction.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
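+// Builds a single experiment branch. Each preference in opts.preferences (or a
+// default "fake.preference") is filled in with string/default-branch defaults.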
+function branchFactory(opts = {}) {
+ const defaultPreferences = {
+ "fake.preference": {},
+ };
+ const defaultPrefInfo = {
+ preferenceType: "string",
+ preferenceBranchType: "default",
+ preferenceValue: "foo",
+ };
+ const preferences = {};
+ for (const [prefName, prefInfo] of Object.entries(
+ opts.preferences || defaultPreferences
+ )) {
+ preferences[prefName] = { ...defaultPrefInfo, ...prefInfo };
+ }
+ return {
+ slug: "test",
+ ratio: 1,
+ ...opts,
+ preferences,
+ };
+}
+
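+// Builds the arguments payload for a preference-experiment recipe; every
+// supplied branch is normalized through branchFactory.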
+function argumentsFactory(args) {
+ const defaultBranches = (args && args.branches) || [{ preferences: [] }];
+ const branches = defaultBranches.map(branchFactory);
+ return {
+ slug: "test",
+ userFacingName: "Super Cool Test Experiment",
+ userFacingDescription:
+ "Test experiment from browser_actions_PreferenceExperimentAction.",
+ isHighPopulation: false,
+ isEnrollmentPaused: false,
+ ...args,
+ branches,
+ };
+}
+
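+// Wraps argumentsFactory in a complete "preference-experiment" recipe using
+// the shared recipeFactory test helper.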
+function prefExperimentRecipeFactory(args) {
+ return recipeFactory({
+ name: "preference-experiment",
+ arguments: argumentsFactory(args),
+ });
+}
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(Uptake, "reportRecipe"),
+ async function run_without_errors({ reportRecipeStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ // Errors thrown in actions are caught and silenced, so instead check for an
+ // explicit success here.
+ Assert.deepEqual(reportRecipeStub.args, [[recipe, Uptake.RECIPE_SUCCESS]]);
+ }
+);
+
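+// With app.shield.optoutstudies.enabled set to false, the action should
+// disable itself during preExecution and report the recipe with
+// RECIPE_ACTION_DISABLED.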
+decorate_task(
+ withStudiesEnabled(),
+ withStub(Uptake, "reportRecipe"),
+ withStub(Uptake, "reportAction"),
+ withPrefEnv({ set: [["app.shield.optoutstudies.enabled", false]] }),
+ async function checks_disabled({ reportRecipeStub, reportActionStub }) {
+ const action = new PreferenceExperimentAction();
+ action.log = mockLogger();
+
+ const recipe = prefExperimentRecipeFactory();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+
+ Assert.ok(action.log.debug.args.length === 1);
+ Assert.deepEqual(action.log.debug.args[0], [
+ "User has opted-out of opt-out experiments, disabling action.",
+ ]);
+ Assert.deepEqual(action.log.warn.args, [
+ [
+ "Skipping recipe preference-experiment because PreferenceExperimentAction " +
+ "was disabled during preExecution.",
+ ],
+ ]);
+
+ await action.finalize();
+ Assert.ok(action.log.debug.args.length === 2);
+ Assert.deepEqual(action.log.debug.args[1], [
+ "Skipping post-execution hook for PreferenceExperimentAction because it is disabled.",
+ ]);
+ Assert.deepEqual(reportRecipeStub.args, [
+ [recipe, Uptake.RECIPE_ACTION_DISABLED],
+ ]);
+ Assert.deepEqual(reportActionStub.args, [
+ [action.name, Uptake.ACTION_SUCCESS],
+ ]);
+ }
+);
+
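+// A user with no prior experiments should be enrolled in the branch returned
+// by chooseBranch, passing the normalized preferences to
+// PreferenceExperiments.start.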
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "start"),
+ PreferenceExperiments.withMockExperiments([]),
+ async function enroll_user_if_never_been_in_experiment({ startStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ slug: "test",
+ branches: [
+ {
+ slug: "branch1",
+ preferences: {
+ "fake.preference": {
+ preferenceBranchType: "user",
+ preferenceValue: "branch1",
+ },
+ },
+ ratio: 1,
+ },
+ {
+ slug: "branch2",
+ preferences: {
+ "fake.preference": {
+ preferenceBranchType: "user",
+ preferenceValue: "branch2",
+ },
+ },
+ ratio: 1,
+ },
+ ],
+ });
+ sinon
+ .stub(action, "chooseBranch")
+ .callsFake(async function (slug, branches) {
+ return branches[0];
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(startStub.args, [
+ [
+ {
+ slug: "test",
+ actionName: "PreferenceExperimentAction",
+ branch: "branch1",
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "branch1",
+ preferenceBranchType: "user",
+ preferenceType: "string",
+ },
+ },
+ experimentType: "exp",
+ userFacingName: "Super Cool Test Experiment",
+ userFacingDescription:
+ "Test experiment from browser_actions_PreferenceExperimentAction.",
+ },
+ ],
+ ]);
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "markLastSeen"),
+ PreferenceExperiments.withMockExperiments([{ slug: "test", expired: false }]),
+ async function markSeen_if_experiment_active({ markLastSeenStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ name: "test",
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(markLastSeenStub.args, [["test"]]);
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "markLastSeen"),
+ PreferenceExperiments.withMockExperiments([{ slug: "test", expired: true }]),
+ async function dont_markSeen_if_experiment_expired({ markLastSeenStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ name: "test",
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(markLastSeenStub.args, [], "markLastSeen was not called");
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "start"),
+ async function do_nothing_if_enrollment_paused({ startStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ isEnrollmentPaused: true,
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(startStub.args, [], "start was not called");
+ }
+);
+
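+// Experiments owned by this action that are not seen during a run should be
+// stopped with reason "recipe-not-seen".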
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "stop"),
+ PreferenceExperiments.withMockExperiments([
+ { slug: "seen", expired: false, actionName: "PreferenceExperimentAction" },
+ {
+ slug: "unseen",
+ expired: false,
+ actionName: "PreferenceExperimentAction",
+ },
+ ]),
+ async function stop_experiments_not_seen({ stopStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ slug: "seen",
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(stopStub.args, [
+ [
+ "unseen",
+ {
+ resetValue: true,
+ reason: "recipe-not-seen",
+ caller: "PreferenceExperimentAction._finalize",
+ },
+ ],
+ ]);
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "stop"),
+ PreferenceExperiments.withMockExperiments([
+ {
+ slug: "seen",
+ expired: false,
+ actionName: "SinglePreferenceExperimentAction",
+ },
+ {
+ slug: "unseen",
+ expired: false,
+ actionName: "SinglePreferenceExperimentAction",
+ },
+ ]),
+ async function dont_stop_experiments_for_other_action({ stopStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ name: "seen",
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(
+ stopStub.args,
+ [],
+ "stop not called for other action's experiments"
+ );
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "start"),
+ withStub(Uptake, "reportRecipe"),
+ PreferenceExperiments.withMockExperiments([
+ {
+ slug: "conflict",
+ preferences: {
+ "conflict.pref": {},
+ },
+ expired: false,
+ },
+ ]),
+ async function do_nothing_if_preference_is_already_being_tested({
+ startStub,
+ reportRecipeStub,
+ }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ name: "new",
+ branches: [
+ {
+ preferences: { "conflict.pref": {} },
+ },
+ ],
+ });
+ action.chooseBranch = sinon
+ .stub()
+ .callsFake(async function (slug, branches) {
+ return branches[0];
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(reportRecipeStub.args, [
+ [recipe, Uptake.RECIPE_EXECUTION_ERROR],
+ ]);
+ Assert.deepEqual(startStub.args, [], "start not called");
+ // No way to get access to log message/Error thrown
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "start"),
+ PreferenceExperiments.withMockExperiments([]),
+ async function experimentType_with_isHighPopulation_false({ startStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ isHighPopulation: false,
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(startStub.args[0][0].experimentType, "exp");
+ }
+);
+
+decorate_task(
+ withStudiesEnabled(),
+ withStub(PreferenceExperiments, "start"),
+ PreferenceExperiments.withMockExperiments([]),
+ async function experimentType_with_isHighPopulation_true({ startStub }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({
+ isHighPopulation: true,
+ });
+
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ Assert.deepEqual(startStub.args[0][0].experimentType, "exp-highpop");
+ }
+);
+
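+// chooseBranch should combine the user id and experiment slug into a seed and
+// pick a branch via Sampling.ratioSample over the branch ratios.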
+decorate_task(
+ withStudiesEnabled(),
+ withStub(Sampling, "ratioSample"),
+ async function chooseBranch_uses_ratioSample({ ratioSampleStub }) {
+ ratioSampleStub.returns(Promise.resolve(1));
+ const action = new PreferenceExperimentAction();
+ const branches = [
+ {
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "branch0",
+ },
+ },
+ ratio: 1,
+ },
+ {
+ preferences: {
+ "fake.preference": {
+ preferenceValue: "branch1",
+ },
+ },
+ ratio: 2,
+ },
+ ];
+ const sandbox = sinon.createSandbox();
+ let result;
+ try {
+ sandbox.stub(ClientEnvironment, "userId").get(() => "fake-id");
+ result = await action.chooseBranch("exp-slug", branches);
+ } finally {
+ sandbox.restore();
+ }
+
+ Assert.deepEqual(ratioSampleStub.args, [
+ ["fake-id-exp-slug-branch", [1, 2]],
+ ]);
+ Assert.deepEqual(result, branches[1]);
+ }
+);
+
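+// Integration test: enroll through the full action in one session, then run a
+// second session where the recipe is not processed, which should unenroll the
+// user.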
+decorate_task(
+ withStudiesEnabled(),
+ withMockPreferences(),
+ PreferenceExperiments.withMockExperiments([]),
+ async function integration_test_enroll_and_unenroll({ mockPreferences }) {
+ mockPreferences.set("fake.preference", "oldvalue", "user");
+ const recipe = prefExperimentRecipeFactory({
+ slug: "integration test experiment",
+ branches: [
+ {
+ slug: "branch1",
+ preferences: {
+ "fake.preference": {
+ preferenceBranchType: "user",
+ preferenceValue: "branch1",
+ },
+ },
+ ratio: 1,
+ },
+ {
+ slug: "branch2",
+ preferences: {
+ "fake.preference": {
+ preferenceBranchType: "user",
+ preferenceValue: "branch2",
+ },
+ },
+ ratio: 1,
+ },
+ ],
+ userFacingName: "userFacingName",
+ userFacingDescription: "userFacingDescription",
+ });
+
+ // Session 1: we see the above recipe and enroll in the experiment.
+ const action = new PreferenceExperimentAction();
+ sinon
+ .stub(action, "chooseBranch")
+ .callsFake(async function (slug, branches) {
+ return branches[0];
+ });
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+
+ const activeExperiments = await PreferenceExperiments.getAllActive();
+ ok(!!activeExperiments.length);
+ Assert.deepEqual(activeExperiments, [
+ {
+ slug: "integration test experiment",
+ actionName: "PreferenceExperimentAction",
+ branch: "branch1",
+ preferences: {
+ "fake.preference": {
+ preferenceBranchType: "user",
+ preferenceValue: "branch1",
+ preferenceType: "string",
+ previousPreferenceValue: "oldvalue",
+ },
+ },
+ expired: false,
+ lastSeen: activeExperiments[0].lastSeen, // can't predict date
+ experimentType: "exp",
+ userFacingName: "userFacingName",
+ userFacingDescription: "userFacingDescription",
+ enrollmentId: activeExperiments[0].enrollmentId,
+ },
+ ]);
+
+ // Session 2: recipe is filtered out and so does not run.
+ const action2 = new PreferenceExperimentAction();
+ await action2.finalize();
+
+ // Experiment should be unenrolled
+ Assert.deepEqual(await PreferenceExperiments.getAllActive(), []);
+ }
+);
+
+// Check that the appropriate set of suitabilities are considered temporary errors
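+// Temporary errors (signature and filter-evaluation failures) should set a
+// one-week deadline instead of ending the experiment right away.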
+decorate_task(
+ withStudiesEnabled(),
+ async function test_temporary_errors_set_deadline() {
+ let suitabilities = [
+ {
+ suitability: BaseAction.suitability.SIGNATURE_ERROR,
+ isTemporaryError: true,
+ },
+ {
+ suitability: BaseAction.suitability.CAPABILITIES_MISMATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_MATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_MISMATCH,
+ isTemporaryError: false,
+ },
+ {
+ suitability: BaseAction.suitability.FILTER_ERROR,
+ isTemporaryError: true,
+ },
+ {
+ suitability: BaseAction.suitability.ARGUMENTS_INVALID,
+ isTemporaryError: false,
+ },
+ ];
+
+ Assert.deepEqual(
+ suitabilities.map(({ suitability }) => suitability).sort(),
+ Array.from(Object.values(BaseAction.suitability)).sort(),
+ "This test covers all suitabilities"
+ );
+
+ // The action should set a deadline 1 week from now. To avoid intermittent
+ // failures, give this a generous bound of 2 hours on either side.
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let expectedDeadline = now + 7 * 24 * hour;
+ let minDeadline = new Date(expectedDeadline - 2 * hour);
+ let maxDeadline = new Date(expectedDeadline + 2 * hour);
+
+ // For each suitability, build a decorator that sets up a suitable
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const { suitability, isTemporaryError } of suitabilities) {
+ const decorator = PreferenceExperiments.withMockExperiments([
+ { slug: `test-for-suitability-${suitability}` },
+ ]);
+ await decorator(async ({ prefExperiments: [experiment] }) => {
+ let action = new PreferenceExperimentAction();
+ let recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, suitability);
+ let modifiedExperiment = await PreferenceExperiments.get(
+ experiment.slug
+ );
+ if (isTemporaryError) {
+ is(
+ typeof modifiedExperiment.temporaryErrorDeadline,
+ "string",
+ `A temporary failure deadline should be set as a string for suitability ${suitability}`
+ );
+ let deadline = new Date(modifiedExperiment.temporaryErrorDeadline);
+ ok(
+ deadline >= minDeadline && deadline <= maxDeadline,
+ `The temporary failure deadline should be in the expected range for ` +
+ `suitability ${suitability} (got ${deadline})`
+ );
+ } else {
+ ok(
+ !modifiedExperiment.temporaryErrorDeadline,
+ `No temporary failure deadline should be set for suitability ${suitability}`
+ );
+ }
+ })();
+ }
+ }
+);
+
+// Check that if there is an existing deadline, temporary errors don't overwrite it
+decorate_task(
+ withStudiesEnabled(),
+ PreferenceExperiments.withMockExperiments([]),
+ async function test_temporary_errors_dont_overwrite_deadline() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // A deadline two hours in the future won't be hit during the test.
+ let now = Date.now();
+ let hour = 2 * 60 * 60 * 1000;
+ let unhitDeadline = new Date(now + hour).toJSON();
+
+ // For each suitability, build a decorator that sets up a suitable
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = PreferenceExperiments.withMockExperiments([
+ {
+ slug: `test-for-suitability-${suitability}`,
+ expired: false,
+ temporaryErrorDeadline: unhitDeadline,
+ },
+ ]);
+ await decorator(async ({ prefExperiments: [experiment] }) => {
+ let action = new PreferenceExperimentAction();
+ let recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, suitability);
+ let modifiedExperiment = await PreferenceExperiments.get(
+ experiment.slug
+ );
+ is(
+ modifiedExperiment.temporaryErrorDeadline,
+ unhitDeadline,
+ `The temporary failure deadline should not be cleared for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that if the deadline is past, temporary errors end the experiment.
+decorate_task(
+ withStudiesEnabled(),
+ async function test_temporary_errors_hit_deadline() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // Set a deadline of two hours in the past, so that the experiment expires.
+ let now = Date.now();
+ let hour = 2 * 60 * 60 * 1000;
+ let hitDeadline = new Date(now - hour).toJSON();
+
+ // For each suitability, build a decorator that sets up a suitable
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = PreferenceExperiments.withMockExperiments([
+ {
+ slug: `test-for-suitability-${suitability}`,
+ expired: false,
+ temporaryErrorDeadline: hitDeadline,
+ preferences: [],
+ branch: "test-branch",
+ },
+ ]);
+ await decorator(async ({ prefExperiments: [experiment] }) => {
+ let action = new PreferenceExperimentAction();
+ let recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, suitability);
+ let modifiedExperiment = await PreferenceExperiments.get(
+ experiment.slug
+ );
+ ok(
+ modifiedExperiment.expired,
+ `The experiment should be expired for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that non-temporary-error suitabilities clear the temporary deadline
+decorate_task(
+ withStudiesEnabled(),
+ PreferenceExperiments.withMockExperiments([]),
+ async function test_non_temporary_error_clears_temporary_error_deadline() {
+ let suitabilitiesThatShouldClearDeadline = [
+ BaseAction.suitability.CAPABILITIES_MISMATCH,
+ BaseAction.suitability.FILTER_MATCH,
+ BaseAction.suitability.FILTER_MISMATCH,
+ BaseAction.suitability.ARGUMENTS_INVALID,
+ ];
+
+ // Use a deadline in the past to demonstrate that even if the deadline has
+ // passed, only a temporary error suitability ends the experiment.
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let hitDeadline = new Date(now - 2 * hour).toJSON();
+
+ // For each suitability, build a decorator that sets up a suitable
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of suitabilitiesThatShouldClearDeadline) {
+ const decorator = PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ slug: `test-for-suitability-${suitability}`.toLocaleLowerCase(),
+ expired: false,
+ temporaryErrorDeadline: hitDeadline,
+ }),
+ ]);
+ await decorator(async ({ prefExperiments: [experiment] }) => {
+ let action = new PreferenceExperimentAction();
+ let recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, suitability);
+ let modifiedExperiment = await PreferenceExperiments.get(
+ experiment.slug
+ );
+ ok(
+ !modifiedExperiment.temporaryErrorDeadline,
+ `The temporary failure deadline should be cleared for suitability ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that invalid deadlines are reset
+decorate_task(
+ withStudiesEnabled(),
+ PreferenceExperiments.withMockExperiments([]),
+ async function test_invalid_temporary_error_deadline_is_reset() {
+ let temporaryFailureSuitabilities = [
+ BaseAction.suitability.SIGNATURE_ERROR,
+ BaseAction.suitability.FILTER_ERROR,
+ ];
+
+ // The action should set a deadline 1 week from now. To avoid intermittent
+ // failures, give this a generous bound of 2 hours on either side.
+ let invalidDeadline = "not a valid date";
+ let now = Date.now();
+ let hour = 60 * 60 * 1000;
+ let expectedDeadline = now + 7 * 24 * hour;
+ let minDeadline = new Date(expectedDeadline - 2 * hour);
+ let maxDeadline = new Date(expectedDeadline + 2 * hour);
+
+ // For each suitability, build a decorator that sets up a suitable
+ // environment, and then call that decorator with a sub-test that asserts
+ // the suitability is handled correctly.
+ for (const suitability of temporaryFailureSuitabilities) {
+ const decorator = PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ slug: `test-for-suitability-${suitability}`.toLocaleLowerCase(),
+ expired: false,
+ temporaryErrorDeadline: invalidDeadline,
+ }),
+ ]);
+ await decorator(async ({ prefExperiments: [experiment] }) => {
+ let action = new PreferenceExperimentAction();
+ let recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, suitability);
+ is(action.lastError, null, "No errors should be reported");
+ let modifiedExperiment = await PreferenceExperiments.get(
+ experiment.slug
+ );
+ ok(
+ modifiedExperiment.temporaryErrorDeadline != invalidDeadline,
+ `The temporary failure deadline should be reset for suitability ${suitability}`
+ );
+ let deadline = new Date(modifiedExperiment.temporaryErrorDeadline);
+ ok(
+ deadline >= minDeadline && deadline <= maxDeadline,
+ `The temporary failure deadline should be reset to a valid deadline for ${suitability}`
+ );
+ })();
+ }
+ }
+);
+
+// Check that an already unenrolled experiment doesn't try to unenroll again if
+// the filter does not match.
+decorate_task(
+ withStudiesEnabled(),
+ withSpy(PreferenceExperiments, "stop"),
+ async function test_stop_when_already_expired({ stopSpy }) {
+ // Use a deadline that is already past
+ const now = new Date();
+ const hour = 1000 * 60 * 60;
+ const temporaryErrorDeadline = new Date(now - hour * 2).toJSON();
+
+ const suitabilitiesToCheck = Object.values(BaseAction.suitability);
+
+ const subtest = decorate(
+ PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({
+ expired: true,
+ temporaryErrorDeadline,
+ }),
+ ]),
+
+ async ({ prefExperiments: [experiment], suitability }) => {
+ const recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ const action = new PreferenceExperimentAction();
+ await action.processRecipe(recipe, suitability);
+ Assert.deepEqual(
+ stopSpy.args,
+ [],
+ `Stop should not be called for ${suitability}`
+ );
+ }
+ );
+
+ for (const suitability of suitabilitiesToCheck) {
+ await subtest({ suitability });
+ }
+ }
+);
+
+// If no recipes are received, it should be considered a temporary error
+decorate_task(
+ withStudiesEnabled(),
+ PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({ expired: false }),
+ ]),
+ withSpy(PreferenceExperiments, "stop"),
+ withStub(PreferenceExperimentAction.prototype, "_considerTemporaryError"),
+ async function testNoRecipes({
+ stopSpy,
+ _considerTemporaryErrorStub,
+ prefExperiments: [experiment],
+ }) {
+ let action = new PreferenceExperimentAction();
+ await action.finalize({ noRecipes: true });
+
+ Assert.deepEqual(stopSpy.args, [], "Stop should not be called");
+ Assert.deepEqual(
+ _considerTemporaryErrorStub.args,
+ [[{ experiment, reason: "no-recipes" }]],
+ "The experiment should accumulate a temporary error"
+ );
+ }
+);
+
+// If recipes are received, but the flag that none were received is set, an error should be thrown
+decorate_task(
+ withStudiesEnabled(),
+ PreferenceExperiments.withMockExperiments([
+ NormandyTestUtils.factories.preferenceStudyFactory({ expired: false }),
+ ]),
+ withSpy(PreferenceExperiments, "stop"),
+ withStub(PreferenceExperimentAction.prototype, "_considerTemporaryError"),
+ async function testNoRecipes({
+ stopSpy,
+ _considerTemporaryErrorStub,
+ prefExperiments: [experiment],
+ }) {
+ const action = new PreferenceExperimentAction();
+ const recipe = prefExperimentRecipeFactory({ slug: experiment.slug });
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MISMATCH);
+ await action.finalize({ noRecipes: true });
+ ok(
+ action.lastError instanceof PreferenceExperimentAction.BadNoRecipesArg,
+ "An error should be logged since some recipes were received"
+ );
+ }
+);
+
+// Unenrolling from an experiment where a user has changed some prefs should not override user choice
+decorate_task(
+ withStudiesEnabled(),
+ withMockPreferences(),
+ PreferenceExperiments.withMockExperiments(),
+ async function testUserPrefNoReset({ mockPreferences }) {
+ mockPreferences.set("test.pref.should-reset", "builtin value", "default");
+ mockPreferences.set("test.pref.user-override", "builtin value", "default");
+
+ await PreferenceExperiments.start({
+ slug: "test-experiment",
+ actionName: "PreferenceExperimentAction",
+ isHighPopulation: false,
+ isEnrollmentPaused: false,
+ userFacingName: "Test Experiment",
+ userFacingDescription: "Test description",
+ branch: "test",
+ preferences: {
+ "test.pref.should-reset": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ previousPreferenceValue: "builtin value",
+ preferenceBranchType: "user",
+ overridden: false,
+ },
+ "test.pref.user-override": {
+ preferenceValue: "experiment value",
+ preferenceType: "string",
+ previousPreferenceValue: "builtin value",
+ preferenceBranchType: "user",
+ overridden: false,
+ },
+ },
+ });
+
+ mockPreferences.set("test.pref.user-override", "user value", "user");
+
+ let exp = await PreferenceExperiments.get("test-experiment");
+ is(
+ exp.preferences["test.pref.user-override"].overridden,
+ true,
+ "Changed pref should be marked as overridden"
+ );
+ is(
+ exp.preferences["test.pref.should-reset"].overridden,
+ false,
+ "Unchanged pref should not be marked as overridden"
+ );
+
+ await PreferenceExperiments.stop("test-experiment", {
+ resetValue: true,
+ reason: "test-reason",
+ });
+
+ is(
+ Services.prefs.getCharPref("test.pref.should-reset"),
+ "builtin value",
+ "pref that was not overridden should reset to builtin"
+ );
+ is(
+ Services.prefs.getCharPref("test.pref.user-override"),
+ "user value",
+ "pref that was overridden should keep user value"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_PreferenceRollbackAction.js b/toolkit/components/normandy/test/browser/browser_actions_PreferenceRollbackAction.js
new file mode 100644
index 0000000000..36d71b72fc
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_PreferenceRollbackAction.js
@@ -0,0 +1,355 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { PreferenceRollbackAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/PreferenceRollbackAction.sys.mjs"
+);
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+const { PreferenceRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceRollouts.sys.mjs"
+);
+
+// Test that a simple recipe rolls back as expected
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function simple_rollback({ setExperimentInactiveStub, sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref1", 2);
+ Services.prefs
+ .getDefaultBranch("")
+ .setCharPref("test.pref2", "rollout value");
+ Services.prefs.getDefaultBranch("").setBoolPref("test.pref3", true);
+
+ await PreferenceRollouts.add({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref1", value: 2, previousValue: 1 },
+ {
+ preferenceName: "test.pref2",
+ value: "rollout value",
+ previousValue: "builtin value",
+ },
+ { preferenceName: "test.pref3", value: true, previousValue: false },
+ ],
+ enrollmentId: "test-enrollment-id",
+ });
+
+ const recipe = { id: 1, arguments: { rolloutSlug: "test-rollout" } };
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ // rollout prefs are reset
+ is(
+ Services.prefs.getIntPref("test.pref1"),
+ 1,
+ "integer pref should be rolled back"
+ );
+ is(
+ Services.prefs.getCharPref("test.pref2"),
+ "builtin value",
+ "string pref should be rolled back"
+ );
+ is(
+ Services.prefs.getBoolPref("test.pref3"),
+ false,
+ "boolean pref should be rolled back"
+ );
+
+ // start up prefs are unset
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref1"),
+ Services.prefs.PREF_INVALID,
+ "integer startup pref should be unset"
+ );
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref2"),
+ Services.prefs.PREF_INVALID,
+ "string startup pref should be unset"
+ );
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref3"),
+ Services.prefs.PREF_INVALID,
+ "boolean startup pref should be unset"
+ );
+
+ // rollout in db was updated
+ const rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ROLLED_BACK,
+ preferences: [
+ { preferenceName: "test.pref1", value: 2, previousValue: 1 },
+ {
+ preferenceName: "test.pref2",
+ value: "rollout value",
+ previousValue: "builtin value",
+ },
+ { preferenceName: "test.pref3", value: true, previousValue: false },
+ ],
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be updated in db"
+ );
+
+ // Telemetry is updated
+ sendEventSpy.assertEvents([
+ [
+ "unenroll",
+ "preference_rollback",
+ recipe.arguments.rolloutSlug,
+ { reason: "rollback" },
+ ],
+ ]);
+ Assert.deepEqual(
+ setExperimentInactiveStub.args,
+ [["test-rollout"]],
+ "the telemetry experiment should deactivated"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref1");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref2");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref3");
+ }
+);
+
+// Test that a graduated rollout can't be rolled back
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function cant_rollback_graduated({ sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+ await PreferenceRollouts.add({
+ slug: "graduated-rollout",
+ state: PreferenceRollouts.STATE_GRADUATED,
+ preferences: [
+ { preferenceName: "test.pref", value: 1, previousValue: 1 },
+ ],
+ enrollmentId: "test-enrollment-id",
+ });
+
+ let recipe = { id: 1, arguments: { rolloutSlug: "graduated-rollout" } };
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 1, "pref should not change");
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+ Services.prefs.PREF_INVALID,
+ "no startup pref should be added"
+ );
+
+ // rollout in the DB hasn't changed
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [
+ {
+ slug: "graduated-rollout",
+ state: PreferenceRollouts.STATE_GRADUATED,
+ preferences: [
+ { preferenceName: "test.pref", value: 1, previousValue: 1 },
+ ],
+ enrollmentId: "test-enrollment-id",
+ },
+ ],
+ "Rollout should not change in db"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "unenrollFailed",
+ "preference_rollback",
+ "graduated-rollout",
+ { reason: "graduated", enrollmentId: "test-enrollment-id" },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// Test that a rollback without a matching rollout does not send telemetry
+decorate_task(
+ withSendEventSpy(),
+ withStub(Uptake, "reportRecipe"),
+ PreferenceRollouts.withTestMock(),
+ async function rollback_without_rollout({ sendEventSpy, reportRecipeStub }) {
+ let recipe = { id: 1, arguments: { rolloutSlug: "missing-rollout" } };
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ sendEventSpy.assertEvents([]);
+ Assert.deepEqual(
+ reportRecipeStub.args,
+ [[recipe, Uptake.RECIPE_SUCCESS]],
+ "recipe should be reported as succesful"
+ );
+ }
+);
+
+// Test that rolling back an already rolled back recipe doesn't do anything
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentInactive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function rollback_already_rolled_back({
+ setExperimentInactiveStub,
+ sendEventSpy,
+ }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+
+ const recipe = { id: 1, arguments: { rolloutSlug: "test-rollout" } };
+ const rollout = {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ROLLED_BACK,
+ preferences: [
+ { preferenceName: "test.pref", value: 2, previousValue: 1 },
+ ],
+ enrollmentId: "test-rollout-id",
+ };
+ await PreferenceRollouts.add(rollout);
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 1, "pref shouldn't change");
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+ Services.prefs.PREF_INVALID,
+ "startup pref should not be set"
+ );
+
+ // rollout in db was updated
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [rollout],
+ "Rollout shouldn't change in db"
+ );
+
+ // Telemetry is updated
+ sendEventSpy.assertEvents([]);
+ Assert.deepEqual(
+ setExperimentInactiveStub.args,
+ [],
+ "telemetry experiments should not be updated"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// Test that a rollback doesn't affect user prefs
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function rollback_preserves_user_prefs() {
+ Services.prefs
+ .getDefaultBranch("")
+ .setCharPref("test.pref", "rollout value");
+ Services.prefs.setCharPref("test.pref", "user value");
+
+ await PreferenceRollouts.add({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ {
+ preferenceName: "test.pref",
+ value: "rollout value",
+ previousValue: "builtin value",
+ },
+ ],
+ enrollmentId: "test-enrollment-id",
+ });
+
+ const recipe = { id: 1, arguments: { rolloutSlug: "test-rollout" } };
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(
+ Services.prefs.getDefaultBranch("").getCharPref("test.pref"),
+ "builtin value",
+ "default branch should be reset"
+ );
+ is(
+ Services.prefs.getCharPref("test.pref"),
+ "user value",
+ "user branch should remain the same"
+ );
+
+ // Cleanup
+ Services.prefs.deleteBranch("test.pref");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// Test that rollouts in the graduation set can't be rolled back
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock({
+ graduationSet: new Set(["graduated-rollout"]),
+ }),
+ async function cant_rollback_graduation_set({ sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+
+ let recipe = { id: 1, arguments: { rolloutSlug: "graduated-rollout" } };
+
+ const action = new PreferenceRollbackAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 1, "pref should not change");
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+ Services.prefs.PREF_INVALID,
+ "no startup pref should be added"
+ );
+
+ // No entry in the DB
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [],
+ "Rollout should be in the db"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "unenrollFailed",
+ "preference_rollback",
+ "graduated-rollout",
+ {
+ reason: "in-graduation-set",
+ enrollmentId: TelemetryEvents.NO_ENROLLMENT_ID,
+ },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_PreferenceRolloutAction.js b/toolkit/components/normandy/test/browser/browser_actions_PreferenceRolloutAction.js
new file mode 100644
index 0000000000..64b60b3483
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_PreferenceRolloutAction.js
@@ -0,0 +1,725 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { PreferenceRolloutAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/PreferenceRolloutAction.sys.mjs"
+);
+const { PreferenceRollouts } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PreferenceRollouts.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+// Test that a simple recipe enrolls as expected
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function simple_recipe_enrollment({
+ setExperimentActiveStub,
+ sendEventSpy,
+ }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [
+ { preferenceName: "test.pref1", value: 1 },
+ { preferenceName: "test.pref2", value: true },
+ { preferenceName: "test.pref3", value: "it works" },
+ ],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ // rollout prefs are set
+ is(
+ Services.prefs.getIntPref("test.pref1"),
+ 1,
+ "integer pref should be set"
+ );
+ is(
+ Services.prefs.getBoolPref("test.pref2"),
+ true,
+ "boolean pref should be set"
+ );
+ is(
+ Services.prefs.getCharPref("test.pref3"),
+ "it works",
+ "string pref should be set"
+ );
+
+ // start up prefs are set
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref1"),
+ 1,
+ "integer startup pref should be set"
+ );
+ is(
+ Services.prefs.getBoolPref("app.normandy.startupRolloutPrefs.test.pref2"),
+ true,
+ "boolean startup pref should be set"
+ );
+ is(
+ Services.prefs.getCharPref("app.normandy.startupRolloutPrefs.test.pref3"),
+ "it works",
+ "string startup pref should be set"
+ );
+
+ // rollout was stored
+ let rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref1", value: 1, previousValue: null },
+ { preferenceName: "test.pref2", value: true, previousValue: null },
+ {
+ preferenceName: "test.pref3",
+ value: "it works",
+ previousValue: null,
+ },
+ ],
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Rollout should be stored in db"
+ );
+ ok(
+ NormandyTestUtils.isUuid(rollouts[0].enrollmentId),
+ "Rollout should have a UUID enrollmentId"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_rollout",
+ recipe.arguments.slug,
+ { enrollmentId: rollouts[0].enrollmentId },
+ ],
+ ]);
+ ok(
+ setExperimentActiveStub.calledWithExactly("test-rollout", "active", {
+ type: "normandy-prefrollout",
+ enrollmentId: rollouts[0].enrollmentId,
+ }),
+ "a telemetry experiment should be activated"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref1");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref2");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref3");
+ }
+);
+
+// Test that an enrollment's values can change, be removed, and be added
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function update_enrollment({ sendEventSpy }) {
+ // first enrollment
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [
+ { preferenceName: "test.pref1", value: 1 },
+ { preferenceName: "test.pref2", value: 1 },
+ ],
+ },
+ };
+
+ let action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ is(defaultBranch.getIntPref("test.pref1"), 1, "pref1 should be set");
+ is(defaultBranch.getIntPref("test.pref2"), 1, "pref2 should be set");
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref1"),
+ 1,
+ "startup pref1 should be set"
+ );
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref2"),
+ 1,
+ "startup pref2 should be set"
+ );
+
+ // update existing enrollment
+ recipe.arguments.preferences = [
+ // pref1 is removed
+ // pref2's value is updated
+ { preferenceName: "test.pref2", value: 2 },
+ // pref3 is added
+ { preferenceName: "test.pref3", value: 2 },
+ ];
+ action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ /* Todo because of bug 1502410 and bug 1505941 */
+ todo_is(
+ Services.prefs.getPrefType("test.pref1"),
+ Services.prefs.PREF_INVALID,
+ "pref1 should be removed"
+ );
+ is(Services.prefs.getIntPref("test.pref2"), 2, "pref2 should be updated");
+ is(Services.prefs.getIntPref("test.pref3"), 2, "pref3 should be added");
+
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref1"),
+ Services.prefs.PREF_INVALID,
+ "startup pref1 should be removed"
+ );
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref2"),
+ 2,
+ "startup pref2 should be updated"
+ );
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref3"),
+ 2,
+ "startup pref3 should be added"
+ );
+
+ // rollout in the DB has been updated
+ const rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref2", value: 2, previousValue: null },
+ { preferenceName: "test.pref3", value: 2, previousValue: null },
+ ],
+ },
+ ],
+ "Rollout should be updated in db"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_rollout",
+ "test-rollout",
+ { enrollmentId: rollouts[0].enrollmentId },
+ ],
+ [
+ "update",
+ "preference_rollout",
+ "test-rollout",
+ { previousState: "active", enrollmentId: rollouts[0].enrollmentId },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref1");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref2");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref3");
+ }
+);
+
+// Test that a graduated rollout can be ungraduated
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function ungraduate_enrollment({ sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+ await PreferenceRollouts.add({
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_GRADUATED,
+ preferences: [
+ { preferenceName: "test.pref", value: 1, previousValue: 1 },
+ ],
+ enrollmentId: "test-enrollment-id",
+ });
+
+ let recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 2 }],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 2, "pref should be updated");
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref"),
+ 2,
+ "startup pref should be set"
+ );
+
+ // rollout in the DB has been ungraduated
+ const rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref", value: 2, previousValue: 1 },
+ ],
+ },
+ ],
+ "Rollout should be updated in db"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "update",
+ "preference_rollout",
+ "test-rollout",
+ { previousState: "graduated", enrollmentId: "test-enrollment-id" },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// Test when recipes conflict, only one is applied
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function conflicting_recipes({ sendEventSpy }) {
+ // create two recipes that each share a pref and have a unique pref.
+ const recipe1 = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout-1",
+ preferences: [
+ { preferenceName: "test.pref1", value: 1 },
+ { preferenceName: "test.pref2", value: 1 },
+ ],
+ },
+ };
+ const recipe2 = {
+ id: 2,
+ arguments: {
+ slug: "test-rollout-2",
+ preferences: [
+ { preferenceName: "test.pref1", value: 2 },
+ { preferenceName: "test.pref3", value: 2 },
+ ],
+ },
+ };
+
+ // running both in the same session
+ let action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe1, BaseAction.suitability.FILTER_MATCH);
+ await action.processRecipe(recipe2, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ // running recipe2 in a separate session shouldn't change things
+ action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe2, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(
+ Services.prefs.getIntPref("test.pref1"),
+ 1,
+ "pref1 is set to recipe1's value"
+ );
+ is(
+ Services.prefs.getIntPref("test.pref2"),
+ 1,
+ "pref2 is set to recipe1's value"
+ );
+ is(
+ Services.prefs.getPrefType("test.pref3"),
+ Services.prefs.PREF_INVALID,
+ "pref3 is not set"
+ );
+
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref1"),
+ 1,
+ "startup pref1 is set to recipe1's value"
+ );
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref2"),
+ 1,
+ "startup pref2 is set to recipe1's value"
+ );
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref3"),
+ Services.prefs.PREF_INVALID,
+ "startup pref3 is not set"
+ );
+
+ // only successful rollout was stored
+ const rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout-1",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref1", value: 1, previousValue: null },
+ { preferenceName: "test.pref2", value: 1, previousValue: null },
+ ],
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "Only recipe1's rollout should be stored in db"
+ );
+
+ sendEventSpy.assertEvents([
+ ["enroll", "preference_rollout", recipe1.arguments.slug],
+ [
+ "enrollFailed",
+ "preference_rollout",
+ recipe2.arguments.slug,
+ { reason: "conflict", preference: "test.pref1" },
+ ],
+ [
+ "enrollFailed",
+ "preference_rollout",
+ recipe2.arguments.slug,
+ { reason: "conflict", preference: "test.pref1" },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref1");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref2");
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref3");
+ }
+);
+
+// Test when the wrong value type is given, the recipe is not applied
+decorate_task(
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function wrong_preference_value({ sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setCharPref("test.pref", "not an int");
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 1 }],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(
+ Services.prefs.getCharPref("test.pref"),
+ "not an int",
+ "the pref should not be modified"
+ );
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+ Services.prefs.PREF_INVALID,
+ "startup pref is not set"
+ );
+
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [],
+ "no rollout is stored in the db"
+ );
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "preference_rollout",
+ recipe.arguments.slug,
+ { reason: "invalid type", preference: "test.pref" },
+ ],
+ ]);
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// Test that even when applying a rollout, user prefs are preserved
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function preserves_user_prefs() {
+ Services.prefs
+ .getDefaultBranch("")
+ .setCharPref("test.pref", "builtin value");
+ Services.prefs.setCharPref("test.pref", "user value");
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: "rollout value" }],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(
+ Services.prefs.getCharPref("test.pref"),
+ "user value",
+ "user branch value should be preserved"
+ );
+ is(
+ Services.prefs.getDefaultBranch("").getCharPref("test.pref"),
+ "rollout value",
+ "default branch value should change"
+ );
+
+ const rollouts = await PreferenceRollouts.getAll();
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ {
+ preferenceName: "test.pref",
+ value: "rollout value",
+ previousValue: "builtin value",
+ },
+ ],
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "the rollout is added to the db with the correct previous value"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ Services.prefs.deleteBranch("test.pref");
+ }
+);
+
+// Enrollment works for prefs with only a user branch value, and no default value.
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ async function enrollment_with_only_user_branch_value() {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 1 }],
+ },
+ };
+
+ // Set a pref on the user branch only
+ Services.prefs.setIntPref("test.pref", 2);
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(
+ Services.prefs.getIntPref("test.pref"),
+ 2,
+ "original user branch value still visible"
+ );
+ is(
+ Services.prefs.getDefaultBranch("").getIntPref("test.pref"),
+ 1,
+ "default branch was set"
+ );
+ is(
+ Services.prefs.getIntPref("app.normandy.startupRolloutPrefs.test.pref"),
+ 1,
+ "startup pref is est"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// When running a rollout a second time on a pref that doesn't have an existing
+// value, the previous value is handled correctly.
+decorate_task(
+ PreferenceRollouts.withTestMock(),
+ withSendEventSpy(),
+ async function ({ sendEventSpy }) {
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 1 }],
+ },
+ };
+
+ // run once
+ let action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ // run a second time
+ action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ const rollouts = await PreferenceRollouts.getAll();
+
+ Assert.deepEqual(
+ rollouts,
+ [
+ {
+ slug: "test-rollout",
+ state: PreferenceRollouts.STATE_ACTIVE,
+ preferences: [
+ { preferenceName: "test.pref", value: 1, previousValue: null },
+ ],
+ enrollmentId: rollouts[0].enrollmentId,
+ },
+ ],
+ "the DB should have the correct value stored for previousValue"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enroll",
+ "preference_rollout",
+ "test-rollout",
+ { enrollmentId: rollouts[0].enrollmentId },
+ ],
+ ]);
+ }
+);
+
+// New rollouts that are no-ops should send errors
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock(),
+ async function no_op_new_recipe({ setExperimentActiveStub, sendEventSpy }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 1 }],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 1, "pref should not change");
+
+ // start up pref isn't set
+ is(
+ Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+ Services.prefs.PREF_INVALID,
+ "startup pref1 should not be set"
+ );
+
+ // rollout was not stored
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [],
+ "Rollout should not be stored in db"
+ );
+
+ sendEventSpy.assertEvents([
+ [
+ "enrollFailed",
+ "preference_rollout",
+ recipe.arguments.slug,
+ { reason: "would-be-no-op" },
+ ],
+ ]);
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [],
+ "a telemetry experiment should not be activated"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
+
+// New rollouts in the graduation set should silently do nothing
+decorate_task(
+ withStub(TelemetryEnvironment, "setExperimentActive"),
+ withSendEventSpy(),
+ PreferenceRollouts.withTestMock({ graduationSet: new Set(["test-rollout"]) }),
+ async function graduationSetNewRecipe({
+ setExperimentActiveStub,
+ sendEventSpy,
+ }) {
+ Services.prefs.getDefaultBranch("").setIntPref("test.pref", 1);
+
+ const recipe = {
+ id: 1,
+ arguments: {
+ slug: "test-rollout",
+ preferences: [{ preferenceName: "test.pref", value: 1 }],
+ },
+ };
+
+ const action = new PreferenceRolloutAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(action.lastError, null, "lastError should be null");
+
+ is(Services.prefs.getIntPref("test.pref"), 1, "pref should not change");
+
+    // startup pref isn't set
+    is(
+      Services.prefs.getPrefType("app.normandy.startupRolloutPrefs.test.pref"),
+      Services.prefs.PREF_INVALID,
+      "startup pref should not be set"
+ );
+
+ // rollout was not stored
+ Assert.deepEqual(
+ await PreferenceRollouts.getAll(),
+ [],
+ "Rollout should not be stored in db"
+ );
+
+ sendEventSpy.assertEvents([]);
+ Assert.deepEqual(
+ setExperimentActiveStub.args,
+ [],
+ "a telemetry experiment should not be activated"
+ );
+
+ // Cleanup
+ Services.prefs.getDefaultBranch("").deleteBranch("test.pref");
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/browser_actions_ShowHeartbeatAction.js b/toolkit/components/normandy/test/browser/browser_actions_ShowHeartbeatAction.js
new file mode 100644
index 0000000000..393f31b5ae
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/browser_actions_ShowHeartbeatAction.js
@@ -0,0 +1,377 @@
+"use strict";
+
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { ClientEnvironment } = ChromeUtils.importESModule(
+ "resource://normandy/lib/ClientEnvironment.sys.mjs"
+);
+const { Heartbeat } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Heartbeat.sys.mjs"
+);
+
+const { Uptake } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Uptake.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+const HOUR_IN_MS = 60 * 60 * 1000;
+
+function heartbeatRecipeFactory(overrides = {}) {
+ const defaults = {
+ revision_id: 1,
+ name: "Test Recipe",
+ action: "show-heartbeat",
+ arguments: {
+ surveyId: "a survey",
+ message: "test message",
+ engagementButtonLabel: "",
+ thanksMessage: "thanks!",
+ postAnswerUrl: "http://example.com",
+ learnMoreMessage: "Learn More",
+ learnMoreUrl: "http://example.com",
+ repeatOption: "once",
+ },
+ };
+
+ if (overrides.arguments) {
+ defaults.arguments = Object.assign(defaults.arguments, overrides.arguments);
+ delete overrides.arguments;
+ }
+
+ return recipeFactory(Object.assign(defaults, overrides));
+}
+
+// Test that a normal heartbeat works as expected
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testHappyPath({ heartbeatClassStub, heartbeatInstanceStub }) {
+ const recipe = heartbeatRecipeFactory();
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ await action.finalize();
+ is(
+ action.state,
+ ShowHeartbeatAction.STATE_FINALIZED,
+ "Action should be finalized"
+ );
+ is(action.lastError, null, "No errors should have been thrown");
+
+ const options = heartbeatClassStub.args[0][1];
+ Assert.deepEqual(
+ heartbeatClassStub.args,
+ [
+ [
+ heartbeatClassStub.args[0][0], // target window
+ {
+ surveyId: options.surveyId,
+ message: recipe.arguments.message,
+ engagementButtonLabel: recipe.arguments.engagementButtonLabel,
+ thanksMessage: recipe.arguments.thanksMessage,
+ learnMoreMessage: recipe.arguments.learnMoreMessage,
+ learnMoreUrl: recipe.arguments.learnMoreUrl,
+ postAnswerUrl: options.postAnswerUrl,
+ flowId: options.flowId,
+ surveyVersion: recipe.revision_id,
+ },
+ ],
+ ],
+ "expected arguments were passed"
+ );
+
+    ok(NormandyTestUtils.isUuid(options.flowId), "flowId should be a uuid");
+
+ // postAnswerUrl gains several query string parameters. Check that the prefix is right
+ ok(options.postAnswerUrl.startsWith(recipe.arguments.postAnswerUrl));
+
+ ok(
+ heartbeatInstanceStub.eventEmitter.once.calledWith("Voted"),
+ "Voted event handler should be registered"
+ );
+ ok(
+ heartbeatInstanceStub.eventEmitter.once.calledWith("Engaged"),
+ "Engaged event handler should be registered"
+ );
+ }
+);
+
+/* Test that heartbeat doesn't show if an unrelated heartbeat has shown recently. */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testRepeatGeneral({ heartbeatClassStub }) {
+ const allHeartbeatStorage = new Storage("normandy-heartbeat");
+ await allHeartbeatStorage.setItem("lastShown", Date.now());
+ const recipe = heartbeatRecipeFactory();
+
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+
+ is(
+ heartbeatClassStub.args.length,
+ 0,
+ "Heartbeat should not be called once"
+ );
+ }
+);
+
+/* Test that a heartbeat shows if an unrelated heartbeat showed more than 24 hours ago. */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testRepeatUnrelated({ heartbeatClassStub }) {
+ const allHeartbeatStorage = new Storage("normandy-heartbeat");
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 25 * HOUR_IN_MS
+ );
+ const recipe = heartbeatRecipeFactory();
+
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+
+ is(heartbeatClassStub.args.length, 1, "Heartbeat should be called once");
+ }
+);
+
+/* Test that a repeat=once recipe is not shown again, even if it was last shown more than 24 hours ago. */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testRepeatTypeOnce({ heartbeatClassStub }) {
+ const recipe = heartbeatRecipeFactory({
+ arguments: { repeatOption: "once" },
+ });
+ const recipeStorage = new Storage(recipe.id);
+ await recipeStorage.setItem("lastShown", Date.now() - 25 * HOUR_IN_MS);
+
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+
+ is(heartbeatClassStub.args.length, 0, "Heartbeat should not be called");
+ }
+);
+
+/* Test that a repeat=xdays recipe is shown again only after the expected number of days. */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testRepeatTypeXdays({ heartbeatClassStub }) {
+ const recipe = heartbeatRecipeFactory({
+ arguments: {
+ repeatOption: "xdays",
+ repeatEvery: 2,
+ },
+ });
+ const recipeStorage = new Storage(recipe.id);
+ const allHeartbeatStorage = new Storage("normandy-heartbeat");
+
+ await recipeStorage.setItem("lastShown", Date.now() - 25 * HOUR_IN_MS);
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 25 * HOUR_IN_MS
+ );
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+ is(heartbeatClassStub.args.length, 0, "Heartbeat should not be called");
+
+ await recipeStorage.setItem("lastShown", Date.now() - 50 * HOUR_IN_MS);
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 50 * HOUR_IN_MS
+ );
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+ is(
+ heartbeatClassStub.args.length,
+ 1,
+ "Heartbeat should have been called once"
+ );
+ }
+);
+
+/* Test that a repeat=nag recipe is shown again until lastInteraction is set */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testRepeatTypeNag({ heartbeatClassStub }) {
+ const recipe = heartbeatRecipeFactory({
+ arguments: { repeatOption: "nag" },
+ });
+ const recipeStorage = new Storage(recipe.id);
+ const allHeartbeatStorage = new Storage("normandy-heartbeat");
+
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 25 * HOUR_IN_MS
+ );
+ await recipeStorage.setItem("lastShown", Date.now() - 25 * HOUR_IN_MS);
+ const action = new ShowHeartbeatAction();
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+ is(heartbeatClassStub.args.length, 1, "Heartbeat should be called");
+
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 50 * HOUR_IN_MS
+ );
+ await recipeStorage.setItem("lastShown", Date.now() - 50 * HOUR_IN_MS);
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+ is(heartbeatClassStub.args.length, 2, "Heartbeat should be called again");
+
+ await allHeartbeatStorage.setItem(
+ "lastShown",
+ Date.now() - 75 * HOUR_IN_MS
+ );
+ await recipeStorage.setItem("lastShown", Date.now() - 75 * HOUR_IN_MS);
+ await recipeStorage.setItem(
+ "lastInteraction",
+ Date.now() - 50 * HOUR_IN_MS
+ );
+ await action.processRecipe(recipe, BaseAction.suitability.FILTER_MATCH);
+ is(action.lastError, null, "No errors should have been thrown");
+ is(
+ heartbeatClassStub.args.length,
+ 2,
+ "Heartbeat should not be called again"
+ );
+ }
+);
+
+/* generatePostAnswerURL shouldn't annotate empty strings */
+add_task(async function postAnswerEmptyString() {
+ const recipe = heartbeatRecipeFactory({ arguments: { postAnswerUrl: "" } });
+ const action = new ShowHeartbeatAction();
+ is(
+ await action.generatePostAnswerURL(recipe),
+ "",
+ "an empty string should not be annotated"
+ );
+});
+
+/* generatePostAnswerURL should include the right details */
+add_task(async function postAnswerUrl() {
+ const recipe = heartbeatRecipeFactory({
+ arguments: {
+ postAnswerUrl: "https://example.com/survey?survey_id=42",
+ includeTelemetryUUID: false,
+ message: "Hello, World!",
+ },
+ });
+ const action = new ShowHeartbeatAction();
+ const url = new URL(await action.generatePostAnswerURL(recipe));
+
+ is(
+ url.searchParams.get("survey_id"),
+ "42",
+ "Pre-existing search parameters should be preserved"
+ );
+ is(
+ url.searchParams.get("fxVersion"),
+ Services.appinfo.version,
+ "Firefox version should be included"
+ );
+ is(
+ url.searchParams.get("surveyversion"),
+ Services.appinfo.version,
+ "Survey version should also be the Firefox version"
+ );
+ ok(
+ ["0", "1"].includes(url.searchParams.get("syncSetup")),
+ `syncSetup should be 0 or 1, got ${url.searchParams.get("syncSetup")}`
+ );
+ is(
+ url.searchParams.get("updateChannel"),
+ UpdateUtils.getUpdateChannel("false"),
+ "Update channel should be included"
+ );
+ ok(!url.searchParams.has("userId"), "no user id should be included");
+ is(
+ url.searchParams.get("utm_campaign"),
+ "Hello%2CWorld!",
+ "utm_campaign should be an encoded version of the message"
+ );
+ is(
+ url.searchParams.get("utm_medium"),
+ "show-heartbeat",
+ "utm_medium should be the action name"
+ );
+ is(
+ url.searchParams.get("utm_source"),
+ "firefox",
+ "utm_source should be firefox"
+ );
+});
+
+/* generatePostAnswerURL shouldn't override existing values in the url */
+add_task(async function postAnswerUrlNoOverwrite() {
+ const recipe = heartbeatRecipeFactory({
+ arguments: {
+ postAnswerUrl:
+ "https://example.com/survey?utm_source=shady_tims_firey_fox",
+ },
+ });
+ const action = new ShowHeartbeatAction();
+ const url = new URL(await action.generatePostAnswerURL(recipe));
+ is(
+ url.searchParams.get("utm_source"),
+ "shady_tims_firey_fox",
+ "utm_source should not be overwritten"
+ );
+});
+
+/* generatePostAnswerURL should only include userId if requested */
+add_task(async function postAnswerUrlUserIdIfRequested() {
+ const recipeWithId = heartbeatRecipeFactory({
+ arguments: { includeTelemetryUUID: true },
+ });
+ const recipeWithoutId = heartbeatRecipeFactory({
+ arguments: { includeTelemetryUUID: false },
+ });
+ const action = new ShowHeartbeatAction();
+
+ const urlWithId = new URL(await action.generatePostAnswerURL(recipeWithId));
+ is(
+ urlWithId.searchParams.get("userId"),
+ ClientEnvironment.userId,
+ "clientId should be included"
+ );
+
+ const urlWithoutId = new URL(
+ await action.generatePostAnswerURL(recipeWithoutId)
+ );
+ ok(!urlWithoutId.searchParams.has("userId"), "userId should not be included");
+});
+
+/* generateSurveyId should include userId only if requested */
+decorate_task(
+ withStubbedHeartbeat(),
+ withClearStorage(),
+ async function testGenerateSurveyId() {
+ const recipeWithoutId = heartbeatRecipeFactory({
+ arguments: { surveyId: "test-id", includeTelemetryUUID: false },
+ });
+ const recipeWithId = heartbeatRecipeFactory({
+ arguments: { surveyId: "test-id", includeTelemetryUUID: true },
+ });
+ const action = new ShowHeartbeatAction();
+ is(
+ action.generateSurveyId(recipeWithoutId),
+ "test-id",
+ "userId should not be included if not requested"
+ );
+ is(
+ action.generateSurveyId(recipeWithId),
+ `test-id::${ClientEnvironment.userId}`,
+ "userId should be included if requested"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/browser/head.js b/toolkit/components/normandy/test/browser/head.js
new file mode 100644
index 0000000000..354c38647e
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/head.js
@@ -0,0 +1,642 @@
+const { Preferences } = ChromeUtils.importESModule(
+ "resource://gre/modules/Preferences.sys.mjs"
+);
+const { AddonTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/AddonTestUtils.sys.mjs"
+);
+const { AboutPages } = ChromeUtils.importESModule(
+ "resource://normandy-content/AboutPages.sys.mjs"
+);
+const { AddonStudies } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonStudies.sys.mjs"
+);
+const { NormandyApi } = ChromeUtils.importESModule(
+ "resource://normandy/lib/NormandyApi.sys.mjs"
+);
+const { TelemetryEvents } = ChromeUtils.importESModule(
+ "resource://normandy/lib/TelemetryEvents.sys.mjs"
+);
+const { ShowHeartbeatAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/ShowHeartbeatAction.sys.mjs"
+);
+
+// The name of this module conflicts with the window.Storage
+// DOM global - https://developer.mozilla.org/en-US/docs/Web/API/Storage .
+// eslint-disable-next-line mozilla/no-redeclare-with-import-autofix
+const { Storage } = ChromeUtils.importESModule(
+ "resource://normandy/lib/Storage.sys.mjs"
+);
+
+ChromeUtils.defineESModuleGetters(this, {
+ TelemetryTestUtils: "resource://testing-common/TelemetryTestUtils.sys.mjs",
+});
+
+const CryptoHash = Components.Constructor(
+ "@mozilla.org/security/hash;1",
+ "nsICryptoHash",
+ "initWithString"
+);
+const FileInputStream = Components.Constructor(
+ "@mozilla.org/network/file-input-stream;1",
+ "nsIFileInputStream",
+ "init"
+);
+
+const { sinon } = ChromeUtils.importESModule(
+ "resource://testing-common/Sinon.sys.mjs"
+);
+
+// Make sinon assertions fail in a way that mochitest understands
+sinon.assert.fail = function (message) {
+ ok(false, message);
+};
+
+// Prep Telemetry to receive events from tests
+TelemetryEvents.init();
+
+this.TEST_XPI_URL = (function () {
+ const dir = getChromeDir(getResolvedURI(gTestPath));
+ dir.append("addons");
+ dir.append("normandydriver-a-1.0.xpi");
+ return Services.io.newFileURI(dir).spec;
+})();
+
+this.withWebExtension = function (
+ manifestOverrides = {},
+ { as = "webExtension" } = {}
+) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ const random = Math.random().toString(36).replace(/0./, "").substr(-3);
+ let addonId = `normandydriver_${random}@example.com`;
+ if ("id" in manifestOverrides) {
+ addonId = manifestOverrides.id;
+ delete manifestOverrides.id;
+ }
+
+ const manifest = Object.assign(
+ {
+ manifest_version: 2,
+ name: "normandy_fixture",
+ version: "1.0",
+ description: "Dummy test fixture that's a webextension",
+ browser_specific_settings: {
+ gecko: { id: addonId },
+ },
+ },
+ manifestOverrides
+ );
+
+ const addonFile = AddonTestUtils.createTempWebExtensionFile({ manifest });
+
+      // Workaround: Add-on files are cached by URL, and
+      // createTempWebExtensionFile re-uses filenames if the previous file has
+      // been deleted, so flush the cache to avoid serving a stale copy.
+ Services.obs.notifyObservers(addonFile, "flush-cache-entry");
+
+ try {
+ await testFunction({ ...args, [as]: { addonId, addonFile } });
+ } finally {
+ AddonTestUtils.cleanupTempXPIs();
+ }
+ };
+ };
+};
+
+this.withCorruptedWebExtension = function (options) {
+ // This should be an invalid manifest version, so that installing this add-on fails.
+ return this.withWebExtension({ manifest_version: -1 }, options);
+};
+
+this.withInstalledWebExtension = function (
+ manifestOverrides = {},
+ { expectUninstall = false, as = "installedWebExtension" } = {}
+) {
+ return function wrapper(testFunction) {
+ return decorate(
+ withWebExtension(manifestOverrides, { as }),
+ async function wrappedTestFunction(args) {
+ const { addonId, addonFile } = args[as];
+ const startupPromise =
+ AddonTestUtils.promiseWebExtensionStartup(addonId);
+ const addonInstall = await AddonManager.getInstallForFile(
+ addonFile,
+ "application/x-xpinstall"
+ );
+ await addonInstall.install();
+ await startupPromise;
+
+ try {
+ await testFunction(args);
+ } finally {
+ const addonToUninstall = await AddonManager.getAddonByID(addonId);
+ if (addonToUninstall) {
+ await addonToUninstall.uninstall();
+ } else {
+ ok(
+ expectUninstall,
+ "Add-on should not be unexpectedly uninstalled during test"
+ );
+ }
+ }
+ }
+ );
+ };
+};
+
+this.withMockNormandyApi = function () {
+ return function (testFunction) {
+ return async function inner(args) {
+ const mockNormandyApi = {
+ actions: [],
+ recipes: [],
+ implementations: {},
+ extensionDetails: {},
+ };
+
+      // Use callsFake instead of resolves so that the current values in mockNormandyApi are used.
+ mockNormandyApi.fetchExtensionDetails = sinon
+ .stub(NormandyApi, "fetchExtensionDetails")
+ .callsFake(async extensionId => {
+ const details = mockNormandyApi.extensionDetails[extensionId];
+ if (!details) {
+ throw new Error(`Missing extension details for ${extensionId}`);
+ }
+ return details;
+ });
+
+ try {
+ await testFunction({ ...args, mockNormandyApi });
+ } finally {
+ mockNormandyApi.fetchExtensionDetails.restore();
+ }
+ };
+ };
+};
+
+const preferenceBranches = {
+ user: Preferences,
+ default: new Preferences({ defaultBranch: true }),
+};
+
+this.withMockPreferences = function () {
+ return function (testFunction) {
+ return async function inner(args) {
+ const mockPreferences = new MockPreferences();
+ try {
+ await testFunction({ ...args, mockPreferences });
+ } finally {
+ mockPreferences.cleanup();
+ }
+ };
+ };
+};
+
+class MockPreferences {
+ constructor() {
+ this.oldValues = { user: {}, default: {} };
+ }
+
+ set(name, value, branch = "user") {
+ this.preserve(name, branch);
+ preferenceBranches[branch].set(name, value);
+ }
+
+ preserve(name, branch) {
+ if (branch !== "user" && branch !== "default") {
+ throw new Error(`Unexpected branch ${branch}`);
+ }
+ if (!(name in this.oldValues[branch])) {
+ const preferenceBranch = preferenceBranches[branch];
+ let oldValue;
+ let existed;
+ try {
+ oldValue = preferenceBranch.get(name);
+ existed = preferenceBranch.has(name);
+ } catch (e) {
+ oldValue = null;
+ existed = false;
+ }
+ this.oldValues[branch][name] = { oldValue, existed };
+ }
+ }
+
+ cleanup() {
+ for (const [branchName, values] of Object.entries(this.oldValues)) {
+ const preferenceBranch = preferenceBranches[branchName];
+ for (const [name, { oldValue, existed }] of Object.entries(values)) {
+ const before = preferenceBranch.get(name);
+
+ if (before === oldValue) {
+ continue;
+ }
+
+ if (existed) {
+ preferenceBranch.set(name, oldValue);
+ } else if (branchName === "default") {
+ Services.prefs.getDefaultBranch(name).deleteBranch("");
+ } else {
+ preferenceBranch.reset(name);
+ }
+
+ const after = preferenceBranch.get(name);
+ if (before === after && before !== undefined) {
+ throw new Error(
+ `Couldn't reset pref "${name}" to "${oldValue}" on "${branchName}" branch ` +
+ `(value stayed "${before}", did ${existed ? "" : "not "}exist)`
+ );
+ }
+ }
+ }
+ }
+}
+
+this.withPrefEnv = function (inPrefs) {
+ return function wrapper(testFunc) {
+ return async function inner(args) {
+ await SpecialPowers.pushPrefEnv(inPrefs);
+ try {
+ await testFunc(args);
+ } finally {
+ await SpecialPowers.popPrefEnv();
+ }
+ };
+ };
+};
+
+this.withStudiesEnabled = function () {
+ return function (testFunc) {
+ return async function inner(args) {
+ await SpecialPowers.pushPrefEnv({
+ set: [["app.shield.optoutstudies.enabled", true]],
+ });
+ try {
+ await testFunc(args);
+ } finally {
+ await SpecialPowers.popPrefEnv();
+ }
+ };
+ };
+};
+
+/**
+ * Combine a list of functions right to left. The rightmost function is passed
+ * to the preceding function as the argument; the result of this is passed to
+ * the next function until all are exhausted. For example, this:
+ *
+ * decorate(func1, func2, func3);
+ *
+ * is equivalent to this:
+ *
+ * func1(func2(func3));
+ */
+this.decorate = function (...args) {
+ const funcs = Array.from(args);
+ let decorated = funcs.pop();
+ const origName = decorated.name;
+ funcs.reverse();
+ for (const func of funcs) {
+ decorated = func(decorated);
+ }
+ Object.defineProperty(decorated, "name", { value: origName });
+ return decorated;
+};
+
+/**
+ * Wrapper around add_task for declaring tests that use several with-style
+ * wrappers. The last argument should be your test function; all other arguments
+ * should be functions that accept a single test function argument.
+ *
+ * The arguments are combined using decorate and passed to add_task as a single
+ * test function.
+ *
+ * @param {Function[]} args
+ * @example
+ * decorate_task(
+ * withMockPreferences(),
+ * withMockNormandyApi(),
+ *   async function myTest({ mockPreferences, mockNormandyApi }) {
+ * // Do a test
+ * }
+ * );
+ */
+this.decorate_task = function (...args) {
+ return add_task(decorate(...args));
+};
+
+this.withStub = function (
+ object,
+ method,
+ { returnValue, as = `${method}Stub` } = {}
+) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ const stub = sinon.stub(object, method);
+      stub.returns(returnValue);
+ try {
+ await testFunction({ ...args, [as]: stub });
+ } finally {
+ stub.restore();
+ }
+ };
+ };
+};
+
+this.withSpy = function (object, method, { as = `${method}Spy` } = {}) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ const spy = sinon.spy(object, method);
+ try {
+ await testFunction({ ...args, [as]: spy });
+ } finally {
+ spy.restore();
+ }
+ };
+ };
+};
+
+this.studyEndObserved = function (recipeId) {
+ return TestUtils.topicObserved(
+ "shield-study-ended",
+ (subject, endedRecipeId) => Number.parseInt(endedRecipeId) === recipeId
+ );
+};
+
+this.withSendEventSpy = function () {
+ return function (testFunction) {
+ return async function wrappedTestFunction(args) {
+ const sendEventSpy = sinon.spy(TelemetryEvents, "sendEvent");
+ sendEventSpy.assertEvents = expected => {
+ expected = expected.map(event => ["normandy"].concat(event));
+ TelemetryTestUtils.assertEvents(
+ expected,
+ { category: "normandy" },
+ { clear: false }
+ );
+ };
+ Services.telemetry.clearEvents();
+ try {
+ await testFunction({ ...args, sendEventSpy });
+ } finally {
+ sendEventSpy.restore();
+ Assert.ok(!sendEventSpy.threw(), "Telemetry events should not fail");
+ }
+ };
+ };
+};
+
+let _recipeId = 1;
+this.recipeFactory = function (overrides = {}) {
+ return Object.assign(
+ {
+ id: _recipeId++,
+ arguments: overrides.arguments || {},
+ },
+ overrides
+ );
+};
+
+function mockLogger() {
+ const logStub = sinon.stub();
+ logStub.fatal = sinon.stub();
+ logStub.error = sinon.stub();
+ logStub.warn = sinon.stub();
+ logStub.info = sinon.stub();
+ logStub.config = sinon.stub();
+ logStub.debug = sinon.stub();
+ logStub.trace = sinon.stub();
+ return logStub;
+}
+
+this.CryptoUtils = {
+ _getHashStringForCrypto(aCrypto) {
+ // return the two-digit hexadecimal code for a byte
+ let toHexString = charCode => ("0" + charCode.toString(16)).slice(-2);
+
+ // convert the binary hash data to a hex string.
+ let binary = aCrypto.finish(false);
+ let hash = Array.from(binary, c => toHexString(c.charCodeAt(0)));
+ return hash.join("").toLowerCase();
+ },
+
+ /**
+ * Get the computed hash for a given file
+ * @param {nsIFile} file The file to be hashed
+ * @param {string} [algorithm] The hashing algorithm to use
+ */
+ getFileHash(file, algorithm = "sha256") {
+ const crypto = CryptoHash(algorithm);
+ const fis = new FileInputStream(file, -1, -1, false);
+ crypto.updateFromStream(fis, file.fileSize);
+ const hash = this._getHashStringForCrypto(crypto);
+ fis.close();
+ return hash;
+ },
+};
+
+const FIXTURE_ADDON_ID = "normandydriver-a@example.com";
+const FIXTURE_ADDON_BASE_URL =
+ getRootDirectory(gTestPath).replace(
+ "chrome://mochitests/content",
+ "http://example.com"
+ ) + "/addons/";
+
+const FIXTURE_ADDONS = [
+ "normandydriver-a-1.0",
+ "normandydriver-b-1.0",
+ "normandydriver-a-2.0",
+];
+
+// Generate fixture add-on details
+this.FIXTURE_ADDON_DETAILS = {};
+FIXTURE_ADDONS.forEach(addon => {
+ const filename = `${addon}.xpi`;
+ const dir = getChromeDir(getResolvedURI(gTestPath));
+ dir.append("addons");
+ dir.append(filename);
+ const xpiFile = Services.io
+ .newFileURI(dir)
+ .QueryInterface(Ci.nsIFileURL).file;
+
+ FIXTURE_ADDON_DETAILS[addon] = {
+ url: `${FIXTURE_ADDON_BASE_URL}${filename}`,
+ hash: CryptoUtils.getFileHash(xpiFile, "sha256"),
+ };
+});
+
+this.extensionDetailsFactory = function (overrides = {}) {
+ return Object.assign(
+ {
+ id: 1,
+ name: "Normandy Fixture",
+ xpi: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url,
+ extension_id: FIXTURE_ADDON_ID,
+ version: "1.0",
+ hash: FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].hash,
+ hash_algorithm: "sha256",
+ },
+ overrides
+ );
+};
+
+/**
+ * Utility function to uninstall add-ons safely, preventing the issue mentioned
+ * in bug 1485569.
+ *
+ * addon.uninstall is async, but it also triggers the AddonStudies onUninstall
+ * listener, which is not awaited. Wrap it here and trigger a promise once it's
+ * done so we can wait until AddonStudies cleanup is finished.
+ */
+this.safeUninstallAddon = async function (addon) {
+ const activeStudies = (await AddonStudies.getAll()).filter(
+ study => study.active
+ );
+ const matchingStudy = activeStudies.find(study => study.addonId === addon.id);
+
+ let studyEndedPromise;
+ if (matchingStudy) {
+ studyEndedPromise = TestUtils.topicObserved(
+ "shield-study-ended",
+ (subject, message) => {
+ return message === `${matchingStudy.recipeId}`;
+ }
+ );
+ }
+
+ const addonUninstallPromise = addon.uninstall();
+
+ return Promise.all([studyEndedPromise, addonUninstallPromise]);
+};
+
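+// Illustrative usage only (a sketch, not part of the original fixture):
+// uninstall the fixture add-on and wait for any matching study-end cleanup.
+//
+//   const addon = await AddonManager.getAddonByID(FIXTURE_ADDON_ID);
+//   await safeUninstallAddon(addon);
+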
+/**
+ * Test decorator that is a modified version of the withInstalledWebExtension
+ * decorator that safely uninstalls the created addon.
+ */
+this.withInstalledWebExtensionSafe = function (
+ manifestOverrides = {},
+ { as = "installedWebExtensionSafe" } = {}
+) {
+ return testFunction => {
+ return async function wrappedTestFunction(args) {
+ const decorated = withInstalledWebExtension(manifestOverrides, {
+ expectUninstall: true,
+ as,
+ })(async ({ [as]: { addonId, addonFile } }) => {
+ try {
+ await testFunction({ ...args, [as]: { addonId, addonFile } });
+ } finally {
+ let addon = await AddonManager.getAddonByID(addonId);
+ if (addon) {
+ await safeUninstallAddon(addon);
+ addon = await AddonManager.getAddonByID(addonId);
+ ok(!addon, "add-on should be uninstalled");
+ }
+ }
+ });
+ await decorated();
+ };
+ };
+};
+
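+// Sketch of how a test might use this decorator (illustrative only; the
+// manifest override value is arbitrary):
+//
+//   decorate_task(
+//     withInstalledWebExtensionSafe({ id: "fixture@example.com" }),
+//     async function ({ installedWebExtensionSafe: { addonId, addonFile } }) {
+//       // Exercise the add-on; it is uninstalled safely afterwards.
+//     }
+//   );
+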
+/**
+ * Test decorator to provide a web extension installed from a URL.
+ */
+this.withInstalledWebExtensionFromURL = function (
+ url,
+ { as = "installedWebExtension" } = {}
+) {
+ return function wrapper(testFunction) {
+ return async function wrappedTestFunction(args) {
+ let startupPromise;
+ let addonId;
+
+ const install = await AddonManager.getInstallForURL(url);
+ const listener = {
+ onInstallStarted(cbInstall) {
+ addonId = cbInstall.addon.id;
+ startupPromise = AddonTestUtils.promiseWebExtensionStartup(addonId);
+ },
+ };
+ install.addListener(listener);
+
+ await install.install();
+ await startupPromise;
+
+ try {
+ await testFunction({ ...args, [as]: { addonId, url } });
+ } finally {
+ const addonToUninstall = await AddonManager.getAddonByID(addonId);
+ await safeUninstallAddon(addonToUninstall);
+ }
+ };
+ };
+};
+
+/**
+ * Test decorator that checks that the test cleans up all add-ons installed
+ * during the test. Likely needs to be the first decorator used.
+ */
+this.ensureAddonCleanup = function () {
+ return function (testFunction) {
+ return async function wrappedTestFunction(args) {
+ const beforeAddons = new Set(await AddonManager.getAllAddons());
+
+ try {
+ await testFunction(args);
+ } finally {
+ const afterAddons = new Set(await AddonManager.getAllAddons());
+ Assert.deepEqual(
+ beforeAddons,
+ afterAddons,
+ "The add-ons should be same before and after the test"
+ );
+ }
+ };
+ };
+};
+
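+// Illustrative ordering sketch (not part of the original file): because this
+// decorator snapshots the add-on list before and after everything it wraps,
+// it should be listed before decorators that install add-ons, e.g.:
+//
+//   decorate_task(
+//     ensureAddonCleanup(),
+//     withInstalledWebExtensionFromURL(
+//       FIXTURE_ADDON_DETAILS["normandydriver-a-1.0"].url
+//     ),
+//     async function ({ installedWebExtension: { addonId } }) {
+//       // ...
+//     }
+//   );
+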
+class MockHeartbeat {
+ constructor() {
+ this.eventEmitter = new MockEventEmitter();
+ }
+}
+
+class MockEventEmitter {
+ constructor() {
+ this.once = sinon.stub();
+ }
+}
+
+function withStubbedHeartbeat() {
+ return function (testFunction) {
+ return async function wrappedTestFunction(args) {
+ const heartbeatInstanceStub = new MockHeartbeat();
+ const heartbeatClassStub = sinon.stub();
+ heartbeatClassStub.returns(heartbeatInstanceStub);
+ ShowHeartbeatAction.overrideHeartbeatForTests(heartbeatClassStub);
+
+ try {
+ await testFunction({
+ ...args,
+ heartbeatClassStub,
+ heartbeatInstanceStub,
+ });
+ } finally {
+ ShowHeartbeatAction.overrideHeartbeatForTests();
+ }
+ };
+ };
+}
+
+function withClearStorage() {
+ return function (testFunction) {
+ return async function wrappedTestFunction(args) {
+ Storage.clearAllStorage();
+ try {
+ await testFunction(args);
+ } finally {
+ Storage.clearAllStorage();
+ }
+ };
+ };
+}
diff --git a/toolkit/components/normandy/test/browser/moz.build b/toolkit/components/normandy/test/browser/moz.build
new file mode 100644
index 0000000000..a6fcd8c09a
--- /dev/null
+++ b/toolkit/components/normandy/test/browser/moz.build
@@ -0,0 +1,27 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+BROWSER_CHROME_MANIFESTS += [
+ "browser.ini",
+]
+
+addons = [
+ "normandydriver-a-1.0",
+ "normandydriver-b-1.0",
+ "normandydriver-a-2.0",
+]
+
+output_dir = (
+ OBJDIR_FILES._tests.testing.mochitest.browser.toolkit.components.normandy.test.browser.addons
+)
+
+for addon in addons:
+ indir = "addons/%s" % addon
+ xpi = "%s.xpi" % indir
+
+ GeneratedFile(xpi, script="../create_xpi.py", inputs=[indir])
+
+ output_dir += ["!%s" % xpi]
diff --git a/toolkit/components/normandy/test/create_xpi.py b/toolkit/components/normandy/test/create_xpi.py
new file mode 100644
index 0000000000..a34f25a2ed
--- /dev/null
+++ b/toolkit/components/normandy/test/create_xpi.py
@@ -0,0 +1,12 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+from os.path import abspath
+
+from mozbuild.action.zip import main as create_zip
+
+
+def main(output, input_dir):
+    # The build system hands us an open file object, but the zip action below
+    # writes the archive to the path itself, so close the handle first.
+    output.close()
+    return create_zip(["-C", input_dir, abspath(output.name), "**"])
diff --git a/toolkit/components/normandy/test/unit/cookie_server.sjs b/toolkit/components/normandy/test/unit/cookie_server.sjs
new file mode 100644
index 0000000000..ab6099f6a4
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/cookie_server.sjs
@@ -0,0 +1,12 @@
+/**
+ * Sends a response that sets a cookie.
+ */
+function handleRequest(request, response) {
+ // Allow cross-origin, so you can XHR to it!
+ response.setHeader("Access-Control-Allow-Origin", "*", false);
+ // Avoid confusing cache behaviors
+ response.setHeader("Cache-Control", "no-cache", false);
+ // Set a cookie
+ response.setHeader("Set-Cookie", "type=chocolate-chip", false);
+ response.write("");
+}
diff --git a/toolkit/components/normandy/test/unit/echo_server.sjs b/toolkit/components/normandy/test/unit/echo_server.sjs
new file mode 100644
index 0000000000..012f2b406e
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/echo_server.sjs
@@ -0,0 +1,21 @@
+/**
+ * Reads an HTTP status code and response body from the querystring and sends
+ * back a matching response.
+ */
+function handleRequest(request, response) {
+ // Allow cross-origin, so you can XHR to it!
+ response.setHeader("Access-Control-Allow-Origin", "*", false);
+ // Avoid confusing cache behaviors
+ response.setHeader("Cache-Control", "no-cache", false);
+
+ const params = request.queryString.split("&");
+ for (const param of params) {
+ const [key, value] = param.split("=");
+ if (key === "status") {
+ response.setStatusLine(null, value);
+ } else if (key === "body") {
+ response.write(value);
+ }
+ }
+ response.write("");
+}
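+
+// Illustrative usage from a test (sketch only; the host and elided path are
+// assumptions):
+//
+//   const res = await fetch(
+//     "http://example.com/.../echo_server.sjs?status=500&body=ok"
+//   );
+//   // res.status === 500 and (await res.text()) === "ok"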
diff --git a/toolkit/components/normandy/test/unit/head_xpc.js b/toolkit/components/normandy/test/unit/head_xpc.js
new file mode 100644
index 0000000000..ad2192be4b
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/head_xpc.js
@@ -0,0 +1,5 @@
+"use strict";
+
+var { sinon } = ChromeUtils.importESModule(
+ "resource://testing-common/Sinon.sys.mjs"
+);
diff --git a/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/index.json b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/index.json
new file mode 100644
index 0000000000..5bef8d1302
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/index.json
@@ -0,0 +1,4 @@
+{
+ "recipe-signed": "/api/v1/recipe/signed/",
+ "classify-client": "/api/v1/classify_client/"
+}
diff --git a/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/recipe/signed/index.json b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/recipe/signed/index.json
new file mode 100644
index 0000000000..d5495fa87f
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/api/v1/recipe/signed/index.json
@@ -0,0 +1,24 @@
+[
+ {
+ "recipe": {
+ "action": "console-log",
+ "arguments": { "message": "this signature does not match this recipe" },
+ "channels": [],
+ "countries": [],
+ "enabled": true,
+ "extra_filter_expression": "true || true",
+ "filter_expression": "true || true",
+ "id": 1,
+ "last_updated": "2017-02-17T18:29:09.839239Z",
+ "locales": [],
+ "name": "system-addon-test",
+ "revision_id": "b2cb8a26e132182d7d02cf50695d2c7f06cf3b954ff2ff63bca49d724ee91950"
+ },
+ "signature": {
+ "public_key": "MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEVEKiCAIkwRg1VFsP8JOYdSF6a3qvgbRPoEK9eTuLbrB6QixozscKR4iWJ8ZOOX6RPCRgFdfVDoZqjFBFNJN9QtRBk0mVtHbnErx64d2vMF0oWencS1hyLW2whgOgOz7p",
+ "signature": "p4g3eurmPsJK5UcGT97BRyKstpwZ_2mNJkDGpd6QXlkXfvgwprjeyb5yeIEkKUXqc6krWid4obB_OP9-CwOi9tvKY1pV8p98CT5BhF0IVgpF3b7KBW1a0BVdg5owoG5W",
+ "timestamp": "2017-02-17T18:29:09.847614Z",
+ "x5u": "/normandy.content-signature.mozilla.org-20210705.dev.chain"
+ }
+ }
+]
diff --git a/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/normandy.content-signature.mozilla.org-20210705.dev.chain b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/normandy.content-signature.mozilla.org-20210705.dev.chain
new file mode 100644
index 0000000000..5bf53787d8
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/invalid_recipe_signature_api/normandy.content-signature.mozilla.org-20210705.dev.chain
@@ -0,0 +1,123 @@
+-----BEGIN CERTIFICATE-----
+MIIGRTCCBC2gAwIBAgIEAQAABTANBgkqhkiG9w0BAQwFADBrMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHQWxsaXpvbTEXMBUGA1UECxMOQ2xvdWQgU2VydmljZXMxMTAv
+BgNVBAMTKERldnppbGxhIFNpZ25pbmcgU2VydmljZXMgSW50ZXJtZWRpYXRlIDEw
+HhcNMTYwNzA2MjE1NzE1WhcNMjEwNzA1MjE1NzE1WjCBrzELMAkGA1UEBhMCVVMx
+EzARBgNVBAgTCkNhbGlmb3JuaWExHDAaBgNVBAoTE01vemlsbGEgQ29ycG9yYXRp
+b24xFzAVBgNVBAsTDkNsb3VkIFNlcnZpY2VzMS8wLQYDVQQDEyZub3JtYW5keS5j
+b250ZW50LXNpZ25hdHVyZS5tb3ppbGxhLm9yZzEjMCEGCSqGSIb3DQEJARYUc2Vj
+dXJpdHlAbW96aWxsYS5vcmcwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARUQqIIAiTB
+GDVUWw/wk5h1IXpreq+BtE+gQr15O4tusHpCLGjOxwpHiJYnxk45fpE8JGAV19UO
+hmqMUEU0k31C1EGTSZW0ducSvHrh3a8wXShZ6dxLWHItbbCGA6A7PumjggJYMIIC
+VDAdBgNVHQ4EFgQUVfksSjlZ0i1TBiS1vcoObaMeXn0wge8GA1UdIwSB5zCB5IAU
+/YboUIXAovChEpudDBuodHKbjUuhgcWkgcIwgb8xCzAJBgNVBAYTAlVTMQswCQYD
+VQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UEChMdQ29udGVu
+dCBTaWduYXR1cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5jb250ZW50LXNp
+Z25hdHVyZS5yb290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNlYytkZXZyb290
+Y29udGVudHNpZ25hdHVyZUBtb3ppbGxhLmNvbYIEAQAABDAMBgNVHRMBAf8EAjAA
+MA4GA1UdDwEB/wQEAwIHgDAWBgNVHSUBAf8EDDAKBggrBgEFBQcDAzBEBgNVHR8E
+PTA7MDmgN6A1hjNodHRwczovL2NvbnRlbnQtc2lnbmF0dXJlLmRldi5tb3phd3Mu
+bmV0L2NhL2NybC5wZW0wQgYJYIZIAYb4QgEEBDUWM2h0dHBzOi8vY29udGVudC1z
+aWduYXR1cmUuZGV2Lm1vemF3cy5uZXQvY2EvY3JsLnBlbTBOBggrBgEFBQcBAQRC
+MEAwPgYIKwYBBQUHMAKGMmh0dHBzOi8vY29udGVudC1zaWduYXR1cmUuZGV2Lm1v
+emF3cy5uZXQvY2EvY2EucGVtMDEGA1UdEQQqMCiCJm5vcm1hbmR5LmNvbnRlbnQt
+c2lnbmF0dXJlLm1vemlsbGEub3JnMA0GCSqGSIb3DQEBDAUAA4ICAQCwb+8JTAB7
+ZfQmFqPUIV2cQQv696AaDPQCtA9YS4zmUfcLMvfZVAbK397zFr0RMDdLiTUQDoeq
+rBEmPXhJRPiv6JAK4n7Jf6Y6XfXcNxx+q3garR09Vm/0CnEq/iV+ZAtPkoKIO9kr
+Nkzecd894yQCF4hIuPQ5qtMySeqJmH3Dp13eq4T0Oew1Bu32rNHuBJh2xYBkWdun
+aAw/YX0I5EqZBP/XA6gbiA160tTK+hnpnlMtw/ljkvfhHbWpICD4aSiTL8L3vABQ
+j7bqjMKR5xDkuGWshZfcmonpvQhGTye/RZ1vz5IzA3VOJt1mz5bdZlitpaOm/Yv0
+x6aODz8GP/PiRWFQ5CW8Uf/7pGc5rSyvnfZV2ix8EzFlo8cUtuN1fjrPFPOFOLvG
+iiB6S9nlXiKBGYIDdd8V8iC5xJpzjiAWJQigwSNzuc2K30+iPo3w0zazkwe5V8jW
+gj6gItYxh5xwVQTPHD0EOd9HvV1ou42+rH5Y+ISFUm25zz02UtUHEK0BKtL0lmdt
+DwVq5jcHn6bx2/iwUtlKvPXtfM/6JjTJlkLZLtS7U5/pwcS0owo9zAL0qg3bdm16
++v/olmPqQFLUHmamJTzv3rojj5X/uVdx1HMM3wBjV9tRYoYaZw9RIInRmM8Z1pHv
+JJ+CIZgCyd5vgp57BKiodRZcgHoCH+BkOQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIHijCCBXKgAwIBAgIEAQAABDANBgkqhkiG9w0BAQwFADCBvzELMAkGA1UEBhMC
+VVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MSYwJAYDVQQK
+Ex1Db250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEmMCQGA1UEAxMdZGV2LmNv
+bnRlbnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG9w0BCQEWLGNsb3Vkc2Vj
+K2RldnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEuY29tMB4XDTE2MDcwNjIx
+NDkyNloXDTIxMDcwNTIxNDkyNlowazELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0Fs
+bGl6b20xFzAVBgNVBAsTDkNsb3VkIFNlcnZpY2VzMTEwLwYDVQQDEyhEZXZ6aWxs
+YSBTaWduaW5nIFNlcnZpY2VzIEludGVybWVkaWF0ZSAxMIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAypIfUURYgWzGw8G/1Pz9zW+Tsjirx2owThiv2gys
+wJiWL/9/2gzKOrYDEqlDUudfA/BjVRtT9+NbYgnhaCkNfADOAacWS83aMhedAqhP
+bVd5YhGQdpijI7f1AVTSb0ehrU2nhOZHvHX5Tk2fbRx3ryefIazNTLFGpiMBbsNv
+tSI/+fjW8s0MhKNqlLnk6a9mZKo0mEy7HjGYV8nzsgI17rKLx/s2HG4TFG0+JQzc
+UGlum3Tg58ritDzWdyKIkmKNZ48oLBX99Qc8j8B1UyiLv6TZmjVX0I+Ds7eSWHZk
+0axLEpTyf2r7fHvN4iDNCPajw+ZpuuBfbs80Ha8b8MHvnpf9fbwiirodNQOVpY4c
+t5E3Us3eYwBKdqDEbECWxCKGAS2/iVVUCNKHsg0sSxgqcwxrxyrddQRUQ0EM38DZ
+F/vHt+vTdHt07kezbjJe0Kklel59uSpghA0iL4vxzbTns1fuwYOgVrNGs3acTkiB
+GhFOxRXUPGtpdYmv+AaR9WlWJQY1GIEoVrinPVH7bcCwyh1CcUbHL+oAFTcmc6kZ
+7azNg21tWILIRL7R0IZYQm0tF5TTwCsjVC7FuHaBtkxtVrrZqeKjvVXQ8TK5VoI0
+BUQ6BKHGeTtm+0HBpheYBDy3wkOsEGbGHLEM6cMeiz6PyCXF8wXli8Qb/TjN3LHZ
+e30CAwEAAaOCAd8wggHbMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGG
+MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMDMB0GA1UdDgQWBBT9huhQhcCi8KESm50M
+G6h0cpuNSzCB7AYDVR0jBIHkMIHhgBSDx8s0qJaMyQCehKcuzgzVNRA75qGBxaSB
+wjCBvzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFp
+biBWaWV3MSYwJAYDVQQKEx1Db250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEm
+MCQGA1UEAxMdZGV2LmNvbnRlbnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG
+9w0BCQEWLGNsb3Vkc2VjK2RldnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEu
+Y29tggEBMEIGCWCGSAGG+EIBBAQ1FjNodHRwczovL2NvbnRlbnQtc2lnbmF0dXJl
+LmRldi5tb3phd3MubmV0L2NhL2NybC5wZW0wTgYIKwYBBQUHAQEEQjBAMD4GCCsG
+AQUFBzAChjJodHRwczovL2NvbnRlbnQtc2lnbmF0dXJlLmRldi5tb3phd3MubmV0
+L2NhL2NhLnBlbTANBgkqhkiG9w0BAQwFAAOCAgEAbum0z0ccqI1Wp49VtsGmUPHA
+gjPPy2Xa5NePmqY87WrGdhjN3xbLVb3hx8T2N6pqDjMY2rEynXKEOek3oJkQ3C6J
+8AFP6Y93gaAlNz6EA0J0mqdW5TMI8YEYsu2ma+dQQ8wm9f/5b+/Y8qwfhztP06W5
+H6IG04/RvgUwYMnSR4QvT309fu5UmCRUDzsO53ZmQCfmN94u3NxHc4S6n0Q6AKAM
+kh5Ld9SQnlqqqDykzn7hYDi8nTLWc7IYqkGfNMilDEKbAl4CjnSfyEvpdFAJ9sPR
+UL+kaWFSMvaqIPNpxS5OpoPZjmxEc9HHnCHxtfDHWdXTJILjijPrCdMaxOCHfIqV
+5roOJggI4RZ0YM68IL1MfN4IEVOrHhKjDHtd1gcmy2KU4jfj9LQq9YTnyvZ2z1yS
+lS310HG3or1K8Nnu5Utfe7T6ppX8bLRMkS1/w0p7DKxHaf4D/GJcCtM9lcSt9JpW
+6ACKFikjWR4ZxczYKgApc0wcjd7XBuO5777xtOeyEUDHdDft3jiXA91dYM5UAzc3
+69z/3zmaELzo0gWcrjLXh7fU9AvbU4EUF6rwzxbPGF78jJcGK+oBf8uWUCkBykDt
+VsAEZI1u4EDg8e/C1nFqaH9gNMArAgquYIB9rve+hdprIMnva0S147pflWopBWcb
+jwzgpfquuYnnxe0CNBA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIH3DCCBcSgAwIBAgIBATANBgkqhkiG9w0BAQwFADCBvzELMAkGA1UEBhMCVVMx
+CzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MSYwJAYDVQQKEx1D
+b250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEmMCQGA1UEAxMdZGV2LmNvbnRl
+bnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG9w0BCQEWLGNsb3Vkc2VjK2Rl
+dnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEuY29tMB4XDTE2MDcwNjE4MTUy
+MloXDTI2MDcwNDE4MTUyMlowgb8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEW
+MBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UEChMdQ29udGVudCBTaWduYXR1
+cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5jb250ZW50LXNpZ25hdHVyZS5y
+b290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNlYytkZXZyb290Y29udGVudHNp
+Z25hdHVyZUBtb3ppbGxhLmNvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC
+ggIBAJcPcXhD8MzF6OTn5qZ0L7lX1+PEgLKhI9g1HxxDYDVup4Zm0kZhPGmFSlml
+6eVO99OvvHdAlHhQGCIG7h+w1cp66mWjfpcvtQH23uRoKZfiW3jy1jUWrvdXolxR
+t1taZosjzo+9OP8TvG6LpJj7AvqUiYD4wYnQJtt0jNRN4d6MUfQwiavSS5uTBuxd
+ZJ4TsPvEI+Iv4A4PSobSzxkg79LTMvsGtDLQv7nN5hMs9T18EL5GnIKoqnSQCU0d
+n2CN7S3QiQ+cbORWsSYqCTj1gUbFh6X3duEB/ypLcyWFbqeJmPHikjY8q8pLjZSB
+IYiTJYLyvYlKdM5QleC/xuBNnMPCftrwwLHjWE4Dd7C9t7k0R5xyOetuiHLCwOcQ
+tuckp7RgFKoviMNv3gdkzwVapOklcsaRkRscv6OMTKJNsdJVIDLrPF1wMABhbEQB
+64BL0uL4lkFtpXXbJzQ6mgUNQveJkfUWOoB+cA/6GtI4J0aQfvQgloCYI6jxNn/e
+Nvk5OV9KFOhXS2dnDft3wHU46sg5yXOuds1u6UrOoATBNFlkS95m4zIX1Svu091+
+CKTiLK85+ZiFtAlU2bPr3Bk3GhL3Z586ae6a4QUEx6SPQVXc18ezB4qxKqFc+avI
+ylccYMRhVP+ruADxtUM5Vy6x3U8BwBK5RLdecRx2FEBDImY1AgMBAAGjggHfMIIB
+2zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAWBgNVHSUBAf8EDDAK
+BggrBgEFBQcDAzAdBgNVHQ4EFgQUg8fLNKiWjMkAnoSnLs4M1TUQO+YwgewGA1Ud
+IwSB5DCB4YAUg8fLNKiWjMkAnoSnLs4M1TUQO+ahgcWkgcIwgb8xCzAJBgNVBAYT
+AlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UE
+ChMdQ29udGVudCBTaWduYXR1cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5j
+b250ZW50LXNpZ25hdHVyZS5yb290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNl
+YytkZXZyb290Y29udGVudHNpZ25hdHVyZUBtb3ppbGxhLmNvbYIBATBCBglghkgB
+hvhCAQQENRYzaHR0cHM6Ly9jb250ZW50LXNpZ25hdHVyZS5kZXYubW96YXdzLm5l
+dC9jYS9jcmwucGVtME4GCCsGAQUFBwEBBEIwQDA+BggrBgEFBQcwAoYyaHR0cHM6
+Ly9jb250ZW50LXNpZ25hdHVyZS5kZXYubW96YXdzLm5ldC9jYS9jYS5wZW0wDQYJ
+KoZIhvcNAQEMBQADggIBAAAQ+fotZE79FfZ8Lz7eiTUzlwHXCdSE2XD3nMROu6n6
+uLTBPrf2C+k+U1FjKVvL5/WCUj6hIzP2X6Sb8+o0XHX9mKN0yoMORTEYJOnazYPK
+KSUUFnE4vGgQkr6k/31gGRMTICdnf3VOOAlUCQ5bOmGIuWi401E3sbd85U+TJe0A
+nHlU+XjtfzlqcBvQivdbA0s+GEG55uRPvn952aTBEMHfn+2JqKeLShl4AtUAfu+h
+6md3Z2HrEC7B3GK8ekWPu0G/ZuWTuFvOimZ+5C8IPRJXcIR/siPQl1x6dpTCew6t
+lPVcVuvg6SQwzvxetkNrGUe2Wb2s9+PK2PUvfOS8ee25SNmfG3XK9qJpqGUhzSBX
+T8QQgyxd0Su5G7Wze7aaHZd/fqIm/G8YFR0HiC2xni/lnDTXFDPCe+HCnSk0bH6U
+wpr6I1yK8oZ2IdnNVfuABGMmGOhvSQ8r7//ea9WKhCsGNQawpVWVioY7hpyNAJ0O
+Vn4xqG5f6allz8lgpwAQ+AeEEClHca6hh6mj9KhD1Of1CC2Vx52GHNh/jMYEc3/g
+zLKniencBqn3Y2XH2daITGJddcleN09+a1NaTkT3hgr7LumxM8EVssPkC+z9j4Vf
+Gbste+8S5QCMhh00g5vR9QF8EaFqdxCdSxrsA4GmpCa5UQl8jtCnpp2DLKXuOh72
+-----END CERTIFICATE-----
diff --git a/toolkit/components/normandy/test/unit/mock_api/api/v1/classify_client/index.json b/toolkit/components/normandy/test/unit/mock_api/api/v1/classify_client/index.json
new file mode 100644
index 0000000000..a9b6239e48
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/api/v1/classify_client/index.json
@@ -0,0 +1,4 @@
+{
+ "country": "US",
+ "request_time": "2017-02-22T17:43:24.657841Z"
+}
diff --git a/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/1/index.json b/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/1/index.json
new file mode 100644
index 0000000000..f088592a9b
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/1/index.json
@@ -0,0 +1,9 @@
+{
+ "id": 1,
+ "name": "Normandy Fixture",
+ "xpi": "http://example.com/browser/toolkit/components/normandy/test/browser/fixtures/normandy.xpi",
+ "extension_id": "normandydriver@example.com",
+ "version": "1.0",
+ "hash": "ade1c14196ec4fe0aa0a6ba40ac433d7c8d1ec985581a8a94d43dc58991b5171",
+ "hash_algorithm": "sha256"
+}
diff --git a/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/index.json b/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/index.json
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/api/v1/extension/index.json
diff --git a/toolkit/components/normandy/test/unit/mock_api/api/v1/index.json b/toolkit/components/normandy/test/unit/mock_api/api/v1/index.json
new file mode 100644
index 0000000000..d2414056c0
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/api/v1/index.json
@@ -0,0 +1,5 @@
+{
+ "classify-client": "/api/v1/classify_client/",
+ "extension-list": "/api/v1/extension/",
+ "recipe-signed": "/api/v1/recipe/signed/"
+}
diff --git a/toolkit/components/normandy/test/unit/mock_api/api/v1/recipe/signed/index.json b/toolkit/components/normandy/test/unit/mock_api/api/v1/recipe/signed/index.json
new file mode 100644
index 0000000000..5f3515dc97
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/api/v1/recipe/signed/index.json
@@ -0,0 +1,24 @@
+[
+ {
+ "recipe": {
+ "action": "console-log",
+ "arguments": { "message": "asdfasfda sdf sa" },
+ "channels": [],
+ "countries": [],
+ "enabled": true,
+ "extra_filter_expression": "true || true",
+ "filter_expression": "true || true",
+ "id": 1,
+ "last_updated": "2017-02-17T18:29:09.839239Z",
+ "locales": [],
+ "name": "system-addon-test",
+ "revision_id": "b2cb8a26e132182d7d02cf50695d2c7f06cf3b954ff2ff63bca49d724ee91950"
+ },
+ "signature": {
+ "public_key": "MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEVEKiCAIkwRg1VFsP8JOYdSF6a3qvgbRPoEK9eTuLbrB6QixozscKR4iWJ8ZOOX6RPCRgFdfVDoZqjFBFNJN9QtRBk0mVtHbnErx64d2vMF0oWencS1hyLW2whgOgOz7p",
+ "signature": "p4g3eurmPsJK5UcGT97BRyKstpwZ_2mNJkDGpd6QXlkXfvgwprjeyb5yeIEkKUXqc6krWid4obB_OP9-CwOi9tvKY1pV8p98CT5BhF0IVgpF3b7KBW1a0BVdg5owoG5W",
+ "timestamp": "2017-02-17T18:29:09.847614Z",
+ "x5u": "/normandy.content-signature.mozilla.org-20210705.dev.chain"
+ }
+ }
+]
diff --git a/toolkit/components/normandy/test/unit/mock_api/normandy.content-signature.mozilla.org-20210705.dev.chain b/toolkit/components/normandy/test/unit/mock_api/normandy.content-signature.mozilla.org-20210705.dev.chain
new file mode 100644
index 0000000000..5bf53787d8
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/mock_api/normandy.content-signature.mozilla.org-20210705.dev.chain
@@ -0,0 +1,123 @@
+-----BEGIN CERTIFICATE-----
+MIIGRTCCBC2gAwIBAgIEAQAABTANBgkqhkiG9w0BAQwFADBrMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHQWxsaXpvbTEXMBUGA1UECxMOQ2xvdWQgU2VydmljZXMxMTAv
+BgNVBAMTKERldnppbGxhIFNpZ25pbmcgU2VydmljZXMgSW50ZXJtZWRpYXRlIDEw
+HhcNMTYwNzA2MjE1NzE1WhcNMjEwNzA1MjE1NzE1WjCBrzELMAkGA1UEBhMCVVMx
+EzARBgNVBAgTCkNhbGlmb3JuaWExHDAaBgNVBAoTE01vemlsbGEgQ29ycG9yYXRp
+b24xFzAVBgNVBAsTDkNsb3VkIFNlcnZpY2VzMS8wLQYDVQQDEyZub3JtYW5keS5j
+b250ZW50LXNpZ25hdHVyZS5tb3ppbGxhLm9yZzEjMCEGCSqGSIb3DQEJARYUc2Vj
+dXJpdHlAbW96aWxsYS5vcmcwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARUQqIIAiTB
+GDVUWw/wk5h1IXpreq+BtE+gQr15O4tusHpCLGjOxwpHiJYnxk45fpE8JGAV19UO
+hmqMUEU0k31C1EGTSZW0ducSvHrh3a8wXShZ6dxLWHItbbCGA6A7PumjggJYMIIC
+VDAdBgNVHQ4EFgQUVfksSjlZ0i1TBiS1vcoObaMeXn0wge8GA1UdIwSB5zCB5IAU
+/YboUIXAovChEpudDBuodHKbjUuhgcWkgcIwgb8xCzAJBgNVBAYTAlVTMQswCQYD
+VQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UEChMdQ29udGVu
+dCBTaWduYXR1cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5jb250ZW50LXNp
+Z25hdHVyZS5yb290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNlYytkZXZyb290
+Y29udGVudHNpZ25hdHVyZUBtb3ppbGxhLmNvbYIEAQAABDAMBgNVHRMBAf8EAjAA
+MA4GA1UdDwEB/wQEAwIHgDAWBgNVHSUBAf8EDDAKBggrBgEFBQcDAzBEBgNVHR8E
+PTA7MDmgN6A1hjNodHRwczovL2NvbnRlbnQtc2lnbmF0dXJlLmRldi5tb3phd3Mu
+bmV0L2NhL2NybC5wZW0wQgYJYIZIAYb4QgEEBDUWM2h0dHBzOi8vY29udGVudC1z
+aWduYXR1cmUuZGV2Lm1vemF3cy5uZXQvY2EvY3JsLnBlbTBOBggrBgEFBQcBAQRC
+MEAwPgYIKwYBBQUHMAKGMmh0dHBzOi8vY29udGVudC1zaWduYXR1cmUuZGV2Lm1v
+emF3cy5uZXQvY2EvY2EucGVtMDEGA1UdEQQqMCiCJm5vcm1hbmR5LmNvbnRlbnQt
+c2lnbmF0dXJlLm1vemlsbGEub3JnMA0GCSqGSIb3DQEBDAUAA4ICAQCwb+8JTAB7
+ZfQmFqPUIV2cQQv696AaDPQCtA9YS4zmUfcLMvfZVAbK397zFr0RMDdLiTUQDoeq
+rBEmPXhJRPiv6JAK4n7Jf6Y6XfXcNxx+q3garR09Vm/0CnEq/iV+ZAtPkoKIO9kr
+Nkzecd894yQCF4hIuPQ5qtMySeqJmH3Dp13eq4T0Oew1Bu32rNHuBJh2xYBkWdun
+aAw/YX0I5EqZBP/XA6gbiA160tTK+hnpnlMtw/ljkvfhHbWpICD4aSiTL8L3vABQ
+j7bqjMKR5xDkuGWshZfcmonpvQhGTye/RZ1vz5IzA3VOJt1mz5bdZlitpaOm/Yv0
+x6aODz8GP/PiRWFQ5CW8Uf/7pGc5rSyvnfZV2ix8EzFlo8cUtuN1fjrPFPOFOLvG
+iiB6S9nlXiKBGYIDdd8V8iC5xJpzjiAWJQigwSNzuc2K30+iPo3w0zazkwe5V8jW
+gj6gItYxh5xwVQTPHD0EOd9HvV1ou42+rH5Y+ISFUm25zz02UtUHEK0BKtL0lmdt
+DwVq5jcHn6bx2/iwUtlKvPXtfM/6JjTJlkLZLtS7U5/pwcS0owo9zAL0qg3bdm16
++v/olmPqQFLUHmamJTzv3rojj5X/uVdx1HMM3wBjV9tRYoYaZw9RIInRmM8Z1pHv
+JJ+CIZgCyd5vgp57BKiodRZcgHoCH+BkOQ==
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIHijCCBXKgAwIBAgIEAQAABDANBgkqhkiG9w0BAQwFADCBvzELMAkGA1UEBhMC
+VVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MSYwJAYDVQQK
+Ex1Db250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEmMCQGA1UEAxMdZGV2LmNv
+bnRlbnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG9w0BCQEWLGNsb3Vkc2Vj
+K2RldnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEuY29tMB4XDTE2MDcwNjIx
+NDkyNloXDTIxMDcwNTIxNDkyNlowazELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0Fs
+bGl6b20xFzAVBgNVBAsTDkNsb3VkIFNlcnZpY2VzMTEwLwYDVQQDEyhEZXZ6aWxs
+YSBTaWduaW5nIFNlcnZpY2VzIEludGVybWVkaWF0ZSAxMIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAypIfUURYgWzGw8G/1Pz9zW+Tsjirx2owThiv2gys
+wJiWL/9/2gzKOrYDEqlDUudfA/BjVRtT9+NbYgnhaCkNfADOAacWS83aMhedAqhP
+bVd5YhGQdpijI7f1AVTSb0ehrU2nhOZHvHX5Tk2fbRx3ryefIazNTLFGpiMBbsNv
+tSI/+fjW8s0MhKNqlLnk6a9mZKo0mEy7HjGYV8nzsgI17rKLx/s2HG4TFG0+JQzc
+UGlum3Tg58ritDzWdyKIkmKNZ48oLBX99Qc8j8B1UyiLv6TZmjVX0I+Ds7eSWHZk
+0axLEpTyf2r7fHvN4iDNCPajw+ZpuuBfbs80Ha8b8MHvnpf9fbwiirodNQOVpY4c
+t5E3Us3eYwBKdqDEbECWxCKGAS2/iVVUCNKHsg0sSxgqcwxrxyrddQRUQ0EM38DZ
+F/vHt+vTdHt07kezbjJe0Kklel59uSpghA0iL4vxzbTns1fuwYOgVrNGs3acTkiB
+GhFOxRXUPGtpdYmv+AaR9WlWJQY1GIEoVrinPVH7bcCwyh1CcUbHL+oAFTcmc6kZ
+7azNg21tWILIRL7R0IZYQm0tF5TTwCsjVC7FuHaBtkxtVrrZqeKjvVXQ8TK5VoI0
+BUQ6BKHGeTtm+0HBpheYBDy3wkOsEGbGHLEM6cMeiz6PyCXF8wXli8Qb/TjN3LHZ
+e30CAwEAAaOCAd8wggHbMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGG
+MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMDMB0GA1UdDgQWBBT9huhQhcCi8KESm50M
+G6h0cpuNSzCB7AYDVR0jBIHkMIHhgBSDx8s0qJaMyQCehKcuzgzVNRA75qGBxaSB
+wjCBvzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFp
+biBWaWV3MSYwJAYDVQQKEx1Db250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEm
+MCQGA1UEAxMdZGV2LmNvbnRlbnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG
+9w0BCQEWLGNsb3Vkc2VjK2RldnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEu
+Y29tggEBMEIGCWCGSAGG+EIBBAQ1FjNodHRwczovL2NvbnRlbnQtc2lnbmF0dXJl
+LmRldi5tb3phd3MubmV0L2NhL2NybC5wZW0wTgYIKwYBBQUHAQEEQjBAMD4GCCsG
+AQUFBzAChjJodHRwczovL2NvbnRlbnQtc2lnbmF0dXJlLmRldi5tb3phd3MubmV0
+L2NhL2NhLnBlbTANBgkqhkiG9w0BAQwFAAOCAgEAbum0z0ccqI1Wp49VtsGmUPHA
+gjPPy2Xa5NePmqY87WrGdhjN3xbLVb3hx8T2N6pqDjMY2rEynXKEOek3oJkQ3C6J
+8AFP6Y93gaAlNz6EA0J0mqdW5TMI8YEYsu2ma+dQQ8wm9f/5b+/Y8qwfhztP06W5
+H6IG04/RvgUwYMnSR4QvT309fu5UmCRUDzsO53ZmQCfmN94u3NxHc4S6n0Q6AKAM
+kh5Ld9SQnlqqqDykzn7hYDi8nTLWc7IYqkGfNMilDEKbAl4CjnSfyEvpdFAJ9sPR
+UL+kaWFSMvaqIPNpxS5OpoPZjmxEc9HHnCHxtfDHWdXTJILjijPrCdMaxOCHfIqV
+5roOJggI4RZ0YM68IL1MfN4IEVOrHhKjDHtd1gcmy2KU4jfj9LQq9YTnyvZ2z1yS
+lS310HG3or1K8Nnu5Utfe7T6ppX8bLRMkS1/w0p7DKxHaf4D/GJcCtM9lcSt9JpW
+6ACKFikjWR4ZxczYKgApc0wcjd7XBuO5777xtOeyEUDHdDft3jiXA91dYM5UAzc3
+69z/3zmaELzo0gWcrjLXh7fU9AvbU4EUF6rwzxbPGF78jJcGK+oBf8uWUCkBykDt
+VsAEZI1u4EDg8e/C1nFqaH9gNMArAgquYIB9rve+hdprIMnva0S147pflWopBWcb
+jwzgpfquuYnnxe0CNBA=
+-----END CERTIFICATE-----
+-----BEGIN CERTIFICATE-----
+MIIH3DCCBcSgAwIBAgIBATANBgkqhkiG9w0BAQwFADCBvzELMAkGA1UEBhMCVVMx
+CzAJBgNVBAgTAkNBMRYwFAYDVQQHEw1Nb3VudGFpbiBWaWV3MSYwJAYDVQQKEx1D
+b250ZW50IFNpZ25hdHVyZSBEZXYgU2lnbmluZzEmMCQGA1UEAxMdZGV2LmNvbnRl
+bnQtc2lnbmF0dXJlLnJvb3QuY2ExOzA5BgkqhkiG9w0BCQEWLGNsb3Vkc2VjK2Rl
+dnJvb3Rjb250ZW50c2lnbmF0dXJlQG1vemlsbGEuY29tMB4XDTE2MDcwNjE4MTUy
+MloXDTI2MDcwNDE4MTUyMlowgb8xCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJDQTEW
+MBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UEChMdQ29udGVudCBTaWduYXR1
+cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5jb250ZW50LXNpZ25hdHVyZS5y
+b290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNlYytkZXZyb290Y29udGVudHNp
+Z25hdHVyZUBtb3ppbGxhLmNvbTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoC
+ggIBAJcPcXhD8MzF6OTn5qZ0L7lX1+PEgLKhI9g1HxxDYDVup4Zm0kZhPGmFSlml
+6eVO99OvvHdAlHhQGCIG7h+w1cp66mWjfpcvtQH23uRoKZfiW3jy1jUWrvdXolxR
+t1taZosjzo+9OP8TvG6LpJj7AvqUiYD4wYnQJtt0jNRN4d6MUfQwiavSS5uTBuxd
+ZJ4TsPvEI+Iv4A4PSobSzxkg79LTMvsGtDLQv7nN5hMs9T18EL5GnIKoqnSQCU0d
+n2CN7S3QiQ+cbORWsSYqCTj1gUbFh6X3duEB/ypLcyWFbqeJmPHikjY8q8pLjZSB
+IYiTJYLyvYlKdM5QleC/xuBNnMPCftrwwLHjWE4Dd7C9t7k0R5xyOetuiHLCwOcQ
+tuckp7RgFKoviMNv3gdkzwVapOklcsaRkRscv6OMTKJNsdJVIDLrPF1wMABhbEQB
+64BL0uL4lkFtpXXbJzQ6mgUNQveJkfUWOoB+cA/6GtI4J0aQfvQgloCYI6jxNn/e
+Nvk5OV9KFOhXS2dnDft3wHU46sg5yXOuds1u6UrOoATBNFlkS95m4zIX1Svu091+
+CKTiLK85+ZiFtAlU2bPr3Bk3GhL3Z586ae6a4QUEx6SPQVXc18ezB4qxKqFc+avI
+ylccYMRhVP+ruADxtUM5Vy6x3U8BwBK5RLdecRx2FEBDImY1AgMBAAGjggHfMIIB
+2zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAWBgNVHSUBAf8EDDAK
+BggrBgEFBQcDAzAdBgNVHQ4EFgQUg8fLNKiWjMkAnoSnLs4M1TUQO+YwgewGA1Ud
+IwSB5DCB4YAUg8fLNKiWjMkAnoSnLs4M1TUQO+ahgcWkgcIwgb8xCzAJBgNVBAYT
+AlVTMQswCQYDVQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEmMCQGA1UE
+ChMdQ29udGVudCBTaWduYXR1cmUgRGV2IFNpZ25pbmcxJjAkBgNVBAMTHWRldi5j
+b250ZW50LXNpZ25hdHVyZS5yb290LmNhMTswOQYJKoZIhvcNAQkBFixjbG91ZHNl
+YytkZXZyb290Y29udGVudHNpZ25hdHVyZUBtb3ppbGxhLmNvbYIBATBCBglghkgB
+hvhCAQQENRYzaHR0cHM6Ly9jb250ZW50LXNpZ25hdHVyZS5kZXYubW96YXdzLm5l
+dC9jYS9jcmwucGVtME4GCCsGAQUFBwEBBEIwQDA+BggrBgEFBQcwAoYyaHR0cHM6
+Ly9jb250ZW50LXNpZ25hdHVyZS5kZXYubW96YXdzLm5ldC9jYS9jYS5wZW0wDQYJ
+KoZIhvcNAQEMBQADggIBAAAQ+fotZE79FfZ8Lz7eiTUzlwHXCdSE2XD3nMROu6n6
+uLTBPrf2C+k+U1FjKVvL5/WCUj6hIzP2X6Sb8+o0XHX9mKN0yoMORTEYJOnazYPK
+KSUUFnE4vGgQkr6k/31gGRMTICdnf3VOOAlUCQ5bOmGIuWi401E3sbd85U+TJe0A
+nHlU+XjtfzlqcBvQivdbA0s+GEG55uRPvn952aTBEMHfn+2JqKeLShl4AtUAfu+h
+6md3Z2HrEC7B3GK8ekWPu0G/ZuWTuFvOimZ+5C8IPRJXcIR/siPQl1x6dpTCew6t
+lPVcVuvg6SQwzvxetkNrGUe2Wb2s9+PK2PUvfOS8ee25SNmfG3XK9qJpqGUhzSBX
+T8QQgyxd0Su5G7Wze7aaHZd/fqIm/G8YFR0HiC2xni/lnDTXFDPCe+HCnSk0bH6U
+wpr6I1yK8oZ2IdnNVfuABGMmGOhvSQ8r7//ea9WKhCsGNQawpVWVioY7hpyNAJ0O
+Vn4xqG5f6allz8lgpwAQ+AeEEClHca6hh6mj9KhD1Of1CC2Vx52GHNh/jMYEc3/g
+zLKniencBqn3Y2XH2daITGJddcleN09+a1NaTkT3hgr7LumxM8EVssPkC+z9j4Vf
+Gbste+8S5QCMhh00g5vR9QF8EaFqdxCdSxrsA4GmpCa5UQl8jtCnpp2DLKXuOh72
+-----END CERTIFICATE-----
diff --git a/toolkit/components/normandy/test/unit/query_server.sjs b/toolkit/components/normandy/test/unit/query_server.sjs
new file mode 100644
index 0000000000..dd00d74bf6
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/query_server.sjs
@@ -0,0 +1,34 @@
+const CC = Components.Constructor;
+const BinaryInputStream = CC(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream"
+);
+
+// Returns a JSON string containing the query string arguments and the
+// request body parsed as JSON.
+function handleRequest(request, response) {
+ // Allow cross-origin, so you can XHR to it!
+ response.setHeader("Access-Control-Allow-Origin", "*", false);
+ // Avoid confusing cache behaviors
+ response.setHeader("Cache-Control", "no-cache", false);
+ response.setHeader("Content-Type", "application/json", false);
+
+ // Read request body
+ const inputStream = new BinaryInputStream(request.bodyInputStream);
+ let bytes = [];
+ let available;
+ while ((available = inputStream.available()) > 0) {
+ bytes = bytes.concat(inputStream.readByteArray(available));
+ }
+ const body = String.fromCharCode.apply(null, bytes);
+
+ // Write response body
+ const data = { queryString: {}, body: body ? JSON.parse(body) : {} };
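+ // Note: parameters are split naively on "&" and "=" and are not URL-decoded;
+ // the tests in this directory only pass simple ASCII keys and values.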
+ const params = request.queryString.split("&");
+ for (const param of params) {
+ const [key, value] = param.split("=");
+ data.queryString[key] = value;
+ }
+ response.write(JSON.stringify(data));
+}
diff --git a/toolkit/components/normandy/test/unit/test_Normandy.js b/toolkit/components/normandy/test/unit/test_Normandy.js
new file mode 100644
index 0000000000..5bb1655fb8
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/test_Normandy.js
@@ -0,0 +1,95 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { Normandy } = ChromeUtils.importESModule(
+ "resource://normandy/Normandy.sys.mjs"
+);
+const { NormandyMigrations } = ChromeUtils.importESModule(
+ "resource://normandy/NormandyMigrations.sys.mjs"
+);
+const { PromiseUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PromiseUtils.sys.mjs"
+);
+ChromeUtils.defineESModuleGetters(this, {
+ TestUtils: "resource://testing-common/TestUtils.sys.mjs",
+});
+
+/* import-globals-from utils.js */
+load("utils.js");
+
+NormandyTestUtils.init({ add_task });
+const { decorate_task } = NormandyTestUtils;
+
+// Normandy's initialization function should set the startup preferences
+// before its first `await`.
+decorate_task(
+ NormandyTestUtils.withStub(Normandy, "finishInit"),
+ NormandyTestUtils.withStub(NormandyMigrations, "applyAll"),
+ NormandyTestUtils.withMockPreferences(),
+ async function test_normandy_init_applies_startup_prefs_synchronously({
+ mockPreferences,
+ }) {
+ const experimentPref = "test.experiment";
+ const rolloutPref = "test.rollout";
+ const experimentStartupPref = `app.normandy.startupExperimentPrefs.${experimentPref}`;
+ const rolloutStartupPref = `app.normandy.startupRolloutPrefs.${rolloutPref}`;
+
+ mockPreferences.preserve(experimentPref, "default");
+ mockPreferences.preserve(rolloutPref, "default");
+ mockPreferences.set(experimentStartupPref, "experiment");
+ mockPreferences.set(rolloutStartupPref, "rollout");
+
+ Assert.equal(
+ Services.prefs.getCharPref(experimentPref, "default"),
+ "default"
+ );
+ Assert.equal(Services.prefs.getCharPref(rolloutPref, "default"), "default");
+
+ let initPromise = Normandy.init({ runAsync: false });
+
+ // Note: there are no awaits before these asserts, so only the part of
+ // Normandy's initialization before its first await can run.
+ Assert.equal(
+ Services.prefs.getCharPref(experimentPref, "default"),
+ "experiment"
+ );
+ Assert.equal(Services.prefs.getCharPref(rolloutPref, "default"), "rollout");
+
+ await initPromise;
+ await Normandy.uninit();
+ }
+);
+
+// Normandy's initialization function should register the observer for UI
+// startup before its first await.
+decorate_task(
+ NormandyTestUtils.withStub(Normandy, "finishInit"),
+ NormandyTestUtils.withStub(NormandyMigrations, "applyAll"),
+ async function test_normandy_init_registers_ui_startup_observer({
+ applyAllStub,
+ }) {
+ let originalDeferred = Normandy.uiAvailableNotificationObserved;
+ let mockUiAvailableDeferred = PromiseUtils.defer();
+ Normandy.uiAvailableNotificationObserved = mockUiAvailableDeferred;
+
+ let applyAllDeferred = PromiseUtils.defer();
+ applyAllStub.returns(applyAllDeferred.promise);
+
+ let promiseResolvedCount = 0;
+ mockUiAvailableDeferred.promise.then(() => promiseResolvedCount++);
+
+ let initPromise = Normandy.init();
+
+ Assert.equal(promiseResolvedCount, 0);
+ Normandy.observe(null, "sessionstore-windows-restored");
+ await TestUtils.waitForCondition(() => promiseResolvedCount === 1);
+
+ applyAllDeferred.resolve();
+
+ await initPromise;
+ await Normandy.uninit();
+ Normandy.uiAvailableNotificationObserved = originalDeferred;
+ }
+);
diff --git a/toolkit/components/normandy/test/unit/test_NormandyApi.js b/toolkit/components/normandy/test/unit/test_NormandyApi.js
new file mode 100644
index 0000000000..885bd9fbdb
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/test_NormandyApi.js
@@ -0,0 +1,257 @@
+/* globals sinon */
+"use strict";
+
+const { PromiseUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PromiseUtils.sys.mjs"
+);
+
+/* import-globals-from utils.js */
+load("utils.js");
+
+NormandyTestUtils.init({ add_task });
+const { decorate_task } = NormandyTestUtils;
+
+Cu.importGlobalProperties(["fetch"]);
+
+decorate_task(withMockApiServer(), async function test_get({ serverUrl }) {
+ // Test that NormandyApi can fetch from the test server.
+ const response = await NormandyApi.get(`${serverUrl}/api/v1/`);
+ const data = await response.json();
+ equal(
+ data["recipe-signed"],
+ "/api/v1/recipe/signed/",
+ "Expected data in response"
+ );
+});
+
+decorate_task(
+ withMockApiServer(),
+ async function test_getApiUrl({ serverUrl }) {
+ const apiBase = `${serverUrl}/api/v1`;
+ // Test that NormandyApi can use the self-describing API's index
+ const recipeListUrl = await NormandyApi.getApiUrl("extension-list");
+ equal(
+ recipeListUrl,
+ `${apiBase}/extension/`,
+ "Can retrieve extension-list URL from API"
+ );
+ }
+);
+
+decorate_task(
+ withMockApiServer(),
+ async function test_getApiUrlSlashes({ serverUrl, mockPreferences }) {
+ const fakeResponse = new MockResponse(
+ JSON.stringify({ "test-endpoint": `${serverUrl}/test/` })
+ );
+ const mockGet = sinon
+ .stub(NormandyApi, "get")
+ .callsFake(async () => fakeResponse);
+
+ // without slash
+ {
+ NormandyApi.clearIndexCache();
+ mockPreferences.set("app.normandy.api_url", `${serverUrl}/api/v1`);
+ const endpoint = await NormandyApi.getApiUrl("test-endpoint");
+ equal(endpoint, `${serverUrl}/test/`);
+ ok(
+ mockGet.calledWithExactly(`${serverUrl}/api/v1/`),
+ "trailing slash was added"
+ );
+ mockGet.resetHistory();
+ }
+
+ // with slash
+ {
+ NormandyApi.clearIndexCache();
+ mockPreferences.set("app.normandy.api_url", `${serverUrl}/api/v1/`);
+ const endpoint = await NormandyApi.getApiUrl("test-endpoint");
+ equal(endpoint, `${serverUrl}/test/`);
+ ok(
+ mockGet.calledWithExactly(`${serverUrl}/api/v1/`),
+ "existing trailing slash was preserved"
+ );
+ mockGet.resetHistory();
+ }
+
+ NormandyApi.clearIndexCache();
+ mockGet.restore();
+ }
+);
+
+// Test validation errors due to validation throwing an exception (e.g. when
+// parameters passed to validation are malformed).
+decorate_task(
+ withMockApiServer(),
+ async function test_validateSignedObject_validation_error() {
+ // Mock the x5u URL
+ const getStub = sinon.stub(NormandyApi, "get").callsFake(async url => {
+ ok(url.endsWith("x5u/"), "the only request should be to fetch the x5u");
+ return new MockResponse("certchain");
+ });
+
+ const signedObject = { a: 1, b: 2 };
+ const signature = {
+ signature: "invalidsignature",
+ x5u: "http://localhost/x5u/",
+ };
+
+ // Validation should fail due to a malformed x5u and signature.
+ try {
+ await NormandyApi.verifyObjectSignature(
+ signedObject,
+ signature,
+ "object"
+ );
+ ok(false, "validateSignedObject did not throw for a validation error");
+ } catch (err) {
+ ok(
+ err instanceof NormandyApi.InvalidSignatureError,
+ "Error is an InvalidSignatureError"
+ );
+ ok(/signature/.test(err), "Error is due to a validation error");
+ }
+
+ getStub.restore();
+ }
+);
+
+// Test validation errors due to validation returning false (e.g. when parameters
+// passed to validation are correctly formed, but not valid for the data).
+decorate_task(
+ withMockApiServer("invalid_recipe_signature_api"),
+ async function test_verifySignedObject_invalid_signature() {
+ // Get the test recipe and signature from the mock server.
+ const recipesUrl = await NormandyApi.getApiUrl("recipe-signed");
+ const recipeResponse = await NormandyApi.get(recipesUrl);
+ const recipes = await recipeResponse.json();
+ equal(recipes.length, 1, "Test data has one recipe");
+ const [{ recipe, signature }] = recipes;
+
+ try {
+ await NormandyApi.verifyObjectSignature(recipe, signature, "recipe");
+ ok(false, "verifyObjectSignature did not throw for an invalid signature");
+ } catch (err) {
+ ok(
+ err instanceof NormandyApi.InvalidSignatureError,
+ "Error is an InvalidSignatureError"
+ );
+ ok(/signature/.test(err), "Error is due to an invalid signature");
+ }
+ }
+);
+
+decorate_task(withMockApiServer(), async function test_classifyClient() {
+ const classification = await NormandyApi.classifyClient();
+ Assert.deepEqual(classification, {
+ country: "US",
+ request_time: new Date("2017-02-22T17:43:24.657841Z"),
+ });
+});
+
+decorate_task(withMockApiServer(), async function test_fetchExtensionDetails() {
+ const extensionDetails = await NormandyApi.fetchExtensionDetails(1);
+ deepEqual(extensionDetails, {
+ id: 1,
+ name: "Normandy Fixture",
+ xpi: "http://example.com/browser/toolkit/components/normandy/test/browser/fixtures/normandy.xpi",
+ extension_id: "normandydriver@example.com",
+ version: "1.0",
+ hash: "ade1c14196ec4fe0aa0a6ba40ac433d7c8d1ec985581a8a94d43dc58991b5171",
+ hash_algorithm: "sha256",
+ });
+});
+
+decorate_task(
+ withScriptServer("query_server.sjs"),
+ async function test_getTestServer({ serverUrl }) {
+ // Test that NormandyApi can fetch from the test server.
+ const response = await NormandyApi.get(serverUrl);
+ const data = await response.json();
+ Assert.deepEqual(
+ data,
+ { queryString: {}, body: {} },
+ "NormandyApi returned incorrect server data."
+ );
+ }
+);
+
+decorate_task(
+ withScriptServer("query_server.sjs"),
+ async function test_getQueryString({ serverUrl }) {
+ // Test that NormandyApi can send query string parameters to the test server.
+ const response = await NormandyApi.get(serverUrl, {
+ foo: "bar",
+ baz: "biff",
+ });
+ const data = await response.json();
+ Assert.deepEqual(
+ data,
+ { queryString: { foo: "bar", baz: "biff" }, body: {} },
+ "NormandyApi sent an incorrect query string."
+ );
+ }
+);
+
+// Test that no credentials are sent, even if the cookie store contains them.
+decorate_task(
+ withScriptServer("cookie_server.sjs"),
+ async function test_sendsNoCredentials({ serverUrl }) {
+ // This test uses cookie_server.sjs, which responds to all requests with a
+ // response that sets a cookie.
+
+ // send a request, to store a cookie in the cookie store
+ await fetch(serverUrl);
+
+ // A normal request should send that cookie
+ const cookieExpectedDeferred = PromiseUtils.defer();
+ function cookieExpectedObserver(aSubject, aTopic, aData) {
+ equal(
+ aTopic,
+ "http-on-modify-request",
+ "Only the expected topic should be observed"
+ );
+ let httpChannel = aSubject.QueryInterface(Ci.nsIHttpChannel);
+ equal(
+ httpChannel.getRequestHeader("Cookie"),
+ "type=chocolate-chip",
+ "The header should be sent"
+ );
+ Services.obs.removeObserver(
+ cookieExpectedObserver,
+ "http-on-modify-request"
+ );
+ cookieExpectedDeferred.resolve();
+ }
+ Services.obs.addObserver(cookieExpectedObserver, "http-on-modify-request");
+ await fetch(serverUrl);
+ await cookieExpectedDeferred.promise;
+
+ // A request through the NormandyApi method should not send that cookie
+ const cookieNotExpectedDeferred = PromiseUtils.defer();
+ function cookieNotExpectedObserver(aSubject, aTopic, aData) {
+ equal(
+ aTopic,
+ "http-on-modify-request",
+ "Only the expected topic should be observed"
+ );
+ let httpChannel = aSubject.QueryInterface(Ci.nsIHttpChannel);
+ Assert.throws(
+ () => httpChannel.getRequestHeader("Cookie"),
+ /NS_ERROR_NOT_AVAILABLE/,
+ "The cookie header should not be sent"
+ );
+ Services.obs.removeObserver(
+ cookieNotExpectedObserver,
+ "http-on-modify-request"
+ );
+ cookieNotExpectedDeferred.resolve();
+ }
+ Services.obs.addObserver(
+ cookieNotExpectedObserver,
+ "http-on-modify-request"
+ );
+ await NormandyApi.get(serverUrl);
+ await cookieNotExpectedDeferred.promise;
+ }
+);
diff --git a/toolkit/components/normandy/test/unit/test_PrefUtils.js b/toolkit/components/normandy/test/unit/test_PrefUtils.js
new file mode 100644
index 0000000000..57130d8783
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/test_PrefUtils.js
@@ -0,0 +1,223 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { PrefUtils } = ChromeUtils.importESModule(
+ "resource://normandy/lib/PrefUtils.sys.mjs"
+);
+
+add_task(function getPrefGetsValues() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ const userBranch = Services.prefs;
+
+ defaultBranch.setBoolPref("test.bool", false);
+ userBranch.setBoolPref("test.bool", true);
+ defaultBranch.setIntPref("test.int", 1);
+ userBranch.setIntPref("test.int", 2);
+ defaultBranch.setStringPref("test.string", "default");
+ userBranch.setStringPref("test.string", "user");
+
+ equal(
+ PrefUtils.getPref("test.bool", { branch: "user" }),
+ true,
+ "should read user branch bools"
+ );
+ equal(
+ PrefUtils.getPref("test.int", { branch: "user" }),
+ 2,
+ "should read user branch ints"
+ );
+ equal(
+ PrefUtils.getPref("test.string", { branch: "user" }),
+ "user",
+ "should read user branch strings"
+ );
+
+ equal(
+ PrefUtils.getPref("test.bool", { branch: "default" }),
+ false,
+ "should read default branch bools"
+ );
+ equal(
+ PrefUtils.getPref("test.int", { branch: "default" }),
+ 1,
+ "should read default branch ints"
+ );
+ equal(
+ PrefUtils.getPref("test.string", { branch: "default" }),
+ "default",
+ "should read default branch strings"
+ );
+
+ equal(
+ PrefUtils.getPref("test.bool"),
+ true,
+ "should read bools from the user branch by default"
+ );
+ equal(
+ PrefUtils.getPref("test.int"),
+ 2,
+ "should read ints from the user branch by default"
+ );
+ equal(
+ PrefUtils.getPref("test.string"),
+ "user",
+ "should read strings from the user branch by default"
+ );
+
+ equal(
+ PrefUtils.getPref("test.does_not_exist"),
+ null,
+ "Should return null for non-existent prefs by default"
+ );
+ let defaultValue = Symbol();
+ equal(
+ PrefUtils.getPref("test.does_not_exist", { defaultValue }),
+ defaultValue,
+ "Should use the passed default value"
+ );
+});
+
+// This is an important test because the pref system can behave in strange ways
+// when the user branch has a value, but the default branch does not.
+add_task(function getPrefHandlesUserValueNoDefaultValue() {
+ Services.prefs.setStringPref("test.only-user-value", "user");
+
+ let defaultValue = Symbol();
+ equal(
+ PrefUtils.getPref("test.only-user-value", {
+ branch: "default",
+ defaultValue,
+ }),
+ defaultValue
+ );
+ equal(PrefUtils.getPref("test.only-user-value", { branch: "default" }), null);
+ equal(PrefUtils.getPref("test.only-user-value", { branch: "user" }), "user");
+ equal(PrefUtils.getPref("test.only-user-value"), "user");
+});
+
+add_task(function getPrefInvalidBranch() {
+ Assert.throws(
+ () => PrefUtils.getPref("test.pref", { branch: "invalid" }),
+ PrefUtils.UnexpectedPreferenceBranch
+ );
+});
+
+add_task(function setPrefSetsValues() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ const userBranch = Services.prefs;
+
+ defaultBranch.setIntPref("test.int", 1);
+ userBranch.setIntPref("test.int", 2);
+ defaultBranch.setStringPref("test.string", "default");
+ userBranch.setStringPref("test.string", "user");
+ defaultBranch.setBoolPref("test.bool", false);
+ userBranch.setBoolPref("test.bool", true);
+
+ PrefUtils.setPref("test.int", 3);
+ equal(
+ userBranch.getIntPref("test.int"),
+ 3,
+ "the user branch should change for ints"
+ );
+ PrefUtils.setPref("test.int", 4, { branch: "default" });
+ equal(
+ userBranch.getIntPref("test.int"),
+ 3,
+ "changing the default branch shouldn't affect the user branch for ints"
+ );
+ PrefUtils.setPref("test.int", null, { branch: "user" });
+ equal(
+ userBranch.getIntPref("test.int"),
+ 4,
+ "clearing the user branch should reveal the default value for ints"
+ );
+
+ PrefUtils.setPref("test.string", "user override");
+ equal(
+ userBranch.getStringPref("test.string"),
+ "user override",
+ "the user branch should change for strings"
+ );
+ PrefUtils.setPref("test.string", "default override", { branch: "default" });
+ equal(
+ userBranch.getStringPref("test.string"),
+ "user override",
+ "changing the default branch shouldn't affect the user branch for strings"
+ );
+ PrefUtils.setPref("test.string", null, { branch: "user" });
+ equal(
+ userBranch.getStringPref("test.string"),
+ "default override",
+ "clearing the user branch should reveal the default value for strings"
+ );
+
+ PrefUtils.setPref("test.bool", false);
+ equal(
+ userBranch.getBoolPref("test.bool"),
+ false,
+ "the user branch should change for bools"
+ );
+ // The above effectively unsets the user branch, since it is now the same as the default branch
+ PrefUtils.setPref("test.bool", true, { branch: "default" });
+ equal(
+ userBranch.getBoolPref("test.bool"),
+ true,
+ "the default branch should change for bools"
+ );
+
+ defaultBranch.setBoolPref("test.bool", false);
+ userBranch.setBoolPref("test.bool", true);
+ equal(
+ userBranch.getBoolPref("test.bool"),
+ true,
+ "the precondition should hold"
+ );
+ PrefUtils.setPref("test.bool", null, { branch: "user" });
+ equal(
+ userBranch.getBoolPref("test.bool"),
+ false,
+ "setting the user branch to null should reveal the default value for bools"
+ );
+});
+
+add_task(function setPrefInvalidBranch() {
+ Assert.throws(
+ () => PrefUtils.setPref("test.pref", "value", { branch: "invalid" }),
+ PrefUtils.UnexpectedPreferenceBranch
+ );
+});
+
+add_task(function clearPrefClearsValues() {
+ const defaultBranch = Services.prefs.getDefaultBranch("");
+ const userBranch = Services.prefs;
+
+ defaultBranch.setStringPref("test.string", "default");
+ userBranch.setStringPref("test.string", "user");
+ equal(
+ userBranch.getStringPref("test.string"),
+ "user",
+ "the precondition should hold"
+ );
+ PrefUtils.clearPref("test.string");
+ equal(
+ userBranch.getStringPref("test.string"),
+ "default",
+ "clearing the user branch should reveal the default value for bools"
+ );
+
+ PrefUtils.clearPref("test.string", { branch: "default" });
+ equal(
+ userBranch.getStringPref("test.string"),
+ "default",
+ "clearing the default branch shouldn't do anything"
+ );
+});
+
+add_task(function clearPrefInvalidBranch() {
+ Assert.throws(
+ () => PrefUtils.clearPref("test.pref", { branch: "invalid" }),
+ PrefUtils.UnexpectedPreferenceBranch
+ );
+});
diff --git a/toolkit/components/normandy/test/unit/test_RecipeRunner.js b/toolkit/components/normandy/test/unit/test_RecipeRunner.js
new file mode 100644
index 0000000000..710ac4d507
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/test_RecipeRunner.js
@@ -0,0 +1,34 @@
+/**
+ * Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+const { RecipeRunner } = ChromeUtils.importESModule(
+ "resource://normandy/lib/RecipeRunner.sys.mjs"
+);
+
+// Test that new build IDs trigger immediate recipe runs
+add_task(async () => {
+ updateAppInfo({
+ appBuildID: "new-build-id",
+ lastAppBuildID: "old-build-id",
+ });
+ const runStub = sinon.stub(RecipeRunner, "run");
+ const registerTimerStub = sinon.stub(RecipeRunner, "registerTimer");
+ sinon.stub(RecipeRunner, "watchPrefs");
+
+ Services.prefs.setBoolPref("app.normandy.first_run", false);
+
+ await RecipeRunner.init();
+ Assert.deepEqual(
+ runStub.args,
+ [[{ trigger: "newBuildID" }]],
+ "RecipeRunner.run is called immediately on a new build ID"
+ );
+ ok(registerTimerStub.called, "RecipeRunner.registerTimer registers a timer");
+
+ sinon.restore();
+});
diff --git a/toolkit/components/normandy/test/unit/test_addon_unenroll.js b/toolkit/components/normandy/test/unit/test_addon_unenroll.js
new file mode 100644
index 0000000000..98750fc976
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/test_addon_unenroll.js
@@ -0,0 +1,310 @@
+const { AddonTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/AddonTestUtils.sys.mjs"
+);
+const { ExtensionTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/ExtensionXPCShellUtils.sys.mjs"
+);
+const { BranchedAddonStudyAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BranchedAddonStudyAction.sys.mjs"
+);
+const { BaseAction } = ChromeUtils.importESModule(
+ "resource://normandy/actions/BaseAction.sys.mjs"
+);
+const { TelemetryEvents } = ChromeUtils.importESModule(
+ "resource://normandy/lib/TelemetryEvents.sys.mjs"
+);
+const { AddonManager } = ChromeUtils.importESModule(
+ "resource://gre/modules/AddonManager.sys.mjs"
+);
+const { AddonStudies } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonStudies.sys.mjs"
+);
+const { PromiseUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PromiseUtils.sys.mjs"
+);
+
+/* import-globals-from utils.js */
+load("utils.js");
+
+NormandyTestUtils.init({ add_task });
+const { decorate_task } = NormandyTestUtils;
+
+const global = this;
+
+add_task(async () => {
+ ExtensionTestUtils.init(global);
+ AddonTestUtils.init(global);
+ AddonTestUtils.createAppInfo(
+ "xpcshell@tests.mozilla.org",
+ "XPCShell",
+ "1",
+ "1.9.2"
+ );
+ AddonTestUtils.overrideCertDB();
+ await AddonTestUtils.promiseStartupManager();
+
+ TelemetryEvents.init();
+});
+
+decorate_task(
+ withMockApiServer(),
+ AddonStudies.withStudies([]),
+ async function test_addon_unenroll({ server: apiServer }) {
+ const ID = "study@tests.mozilla.org";
+
+ // Create a test extension that uses webextension experiments to install
+ // an unenroll listener.
+ let xpi = AddonTestUtils.createTempWebExtensionFile({
+ manifest: {
+ version: "1.0",
+
+ browser_specific_settings: {
+ gecko: { id: ID },
+ },
+
+ experiment_apis: {
+ study: {
+ schema: "schema.json",
+ parent: {
+ scopes: ["addon_parent"],
+ script: "api.js",
+ paths: [["study"]],
+ },
+ },
+ },
+ },
+
+ files: {
+ "schema.json": JSON.stringify([
+ {
+ namespace: "study",
+ events: [
+ {
+ name: "onStudyEnded",
+ type: "function",
+ },
+ ],
+ },
+ ]),
+
+ "api.js": () => {
+ // The code below is serialized into a file embedded in an extension.
+ // But by including it here as code, eslint can analyze it. However,
+ // this code runs in a different environment with different globals,
+ // so the following two lines avoid false eslint warnings:
+ /* globals browser, ExtensionAPI */
+ /* eslint-disable-next-line no-shadow */
+ const { AddonStudies } = ChromeUtils.importESModule(
+ "resource://normandy/lib/AddonStudies.sys.mjs"
+ );
+ const { ExtensionCommon } = ChromeUtils.importESModule(
+ "resource://gre/modules/ExtensionCommon.sys.mjs"
+ );
+ this.study = class extends ExtensionAPI {
+ getAPI(context) {
+ return {
+ study: {
+ onStudyEnded: new ExtensionCommon.EventManager({
+ context,
+ name: "study.onStudyEnded",
+ register: fire => {
+ AddonStudies.addUnenrollListener(
+ this.extension.id,
+ reason => fire.sync(reason)
+ );
+ return () => {};
+ },
+ }).api(),
+ },
+ };
+ }
+ };
+ },
+ },
+
+ background() {
+ browser.study.onStudyEnded.addListener(reason => {
+ browser.test.sendMessage("got-event", reason);
+ return new Promise(resolve => {
+ browser.test.onMessage.addListener(resolve);
+ });
+ });
+ },
+ });
+
+ const server = AddonTestUtils.createHttpServer({ hosts: ["example.com"] });
+ server.registerFile("/study.xpi", xpi);
+
+ const API_ID = 999;
+ apiServer.registerPathHandler(
+ `/api/v1/extension/${API_ID}/`,
+ (request, response) => {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.write(
+ JSON.stringify({
+ id: API_ID,
+ name: "Addon Unenroll Fixture",
+ xpi: "http://example.com/study.xpi",
+ extension_id: ID,
+ version: "1.0",
+ hash: CryptoUtils.getFileHash(xpi, "sha256"),
+ hash_algorithm: "sha256",
+ })
+ );
+ }
+ );
+
+ // Begin by telling Normandy to install the test extension above
+ // that uses a webextension experiment to register a blocking callback
+ // to be invoked when the study ends.
+ let extension = ExtensionTestUtils.expectExtension(ID);
+
+ const RECIPE_ID = 1;
+ const UNENROLL_REASON = "test-ending";
+ let action = new BranchedAddonStudyAction();
+ await action.processRecipe(
+ {
+ id: RECIPE_ID,
+ type: "addon-study",
+ arguments: {
+ slug: "addon-unenroll-test",
+ userFacingDescription: "A recipe to test add-on unenrollment",
+ userFacingName: "Add-on Unenroll Test",
+ isEnrollmentPaused: false,
+ branches: [
+ {
+ ratio: 1,
+ slug: "only",
+ extensionApiId: API_ID,
+ },
+ ],
+ },
+ },
+ BaseAction.suitability.FILTER_MATCH
+ );
+
+ await extension.awaitStartup();
+
+ let addon = await AddonManager.getAddonByID(ID);
+ ok(addon, "Extension is installed");
+
+ // Tell Normandy to end the study; the extension's onStudyEnded event should fire.
+ let unenrollPromise = action.unenroll(RECIPE_ID, UNENROLL_REASON);
+
+ let receivedReason = await extension.awaitMessage("got-event");
+ info("Got onStudyEnded event in extension");
+ equal(receivedReason, UNENROLL_REASON, "Unenroll reason should be passed");
+
+ // The extension has not yet finished its unenrollment tasks, so it
+ // should not yet be uninstalled.
+ addon = await AddonManager.getAddonByID(ID);
+ ok(addon, "Extension has not yet been uninstalled");
+
+ // Once the extension does resolve the promise returned from the
+ // event listener, the uninstall can proceed.
+ extension.sendMessage("resolve");
+ await unenrollPromise;
+
+ addon = await AddonManager.getAddonByID(ID);
+ equal(
+ addon,
+ null,
+ "After resolving studyEnded promise, extension is uninstalled"
+ );
+ }
+);
+
+/* Test that a broken unenroll listener doesn't stop the add-on from being removed */
+decorate_task(
+ withMockApiServer(),
+ AddonStudies.withStudies([]),
+ async function test_addon_unenroll_broken_listener({ server: apiServer }) {
+ const ID = "study@tests.mozilla.org";
+
+ // Create a dummy webextension. Unlike the test above, it does not register
+ // its own unenroll listener; a listener that throws an error is registered
+ // directly below via AddonStudies.addUnenrollListener.
+ let xpi = AddonTestUtils.createTempWebExtensionFile({
+ manifest: {
+ version: "1.0",
+
+ browser_specific_settings: {
+ gecko: { id: ID },
+ },
+ },
+ });
+
+ const server = AddonTestUtils.createHttpServer({ hosts: ["example.com"] });
+ server.registerFile("/study.xpi", xpi);
+
+ const API_ID = 999;
+ apiServer.registerPathHandler(
+ `/api/v1/extension/${API_ID}/`,
+ (request, response) => {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.write(
+ JSON.stringify({
+ id: API_ID,
+ name: "Addon Fixture",
+ xpi: "http://example.com/study.xpi",
+ extension_id: ID,
+ version: "1.0",
+ hash: CryptoUtils.getFileHash(xpi, "sha256"),
+ hash_algorithm: "sha256",
+ })
+ );
+ }
+ );
+
+ // Begin by telling Normandy to install the dummy extension above. The
+ // broken unenroll listener is registered directly with AddonStudies rather
+ // than through a webextension experiment.
+ let extension = ExtensionTestUtils.expectExtension(ID);
+
+ const RECIPE_ID = 1;
+ const UNENROLL_REASON = "test-ending";
+ let action = new BranchedAddonStudyAction();
+ await action.processRecipe(
+ {
+ id: RECIPE_ID,
+ type: "addon-study",
+ arguments: {
+ slug: "addon-unenroll-test",
+ userFacingDescription: "A recipe to test add-on unenrollment",
+ userFacingName: "Add-on Unenroll Test",
+ isEnrollmentPaused: false,
+ branches: [
+ {
+ ratio: 1,
+ slug: "only",
+ extensionApiId: API_ID,
+ },
+ ],
+ },
+ },
+ BaseAction.suitability.FILTER_MATCH
+ );
+
+ await extension.startupPromise;
+
+ let addon = await AddonManager.getAddonByID(ID);
+ ok(addon, "Extension is installed");
+
+ let listenerDeferred = PromiseUtils.defer();
+
+ AddonStudies.addUnenrollListener(ID, () => {
+ listenerDeferred.resolve();
+ throw new Error("This listener is busted");
+ });
+
+ // Tell Normandy to end the study; the broken listener registered above
+ // should be called.
+ await action.unenroll(RECIPE_ID, UNENROLL_REASON);
+ await listenerDeferred.promise;
+
+ addon = await AddonManager.getAddonByID(ID);
+ equal(
+ addon,
+ null,
+ "Extension is uninstalled even though it threw an exception in the callback"
+ );
+ }
+);
diff --git a/toolkit/components/normandy/test/unit/utils.js b/toolkit/components/normandy/test/unit/utils.js
new file mode 100644
index 0000000000..cffe634c91
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/utils.js
@@ -0,0 +1,135 @@
+"use strict";
+/* eslint-disable no-unused-vars */
+
+// Loaded into the same scope as head_xpc.js
+/* import-globals-from head_xpc.js */
+
+const { Preferences } = ChromeUtils.importESModule(
+ "resource://gre/modules/Preferences.sys.mjs"
+);
+const { HttpServer } = ChromeUtils.import("resource://testing-common/httpd.js");
+
+const { NormandyApi } = ChromeUtils.importESModule(
+ "resource://normandy/lib/NormandyApi.sys.mjs"
+);
+const { NormandyTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/NormandyTestUtils.sys.mjs"
+);
+
+const CryptoHash = Components.Constructor(
+ "@mozilla.org/security/hash;1",
+ "nsICryptoHash",
+ "initWithString"
+);
+const FileInputStream = Components.Constructor(
+ "@mozilla.org/network/file-input-stream;1",
+ "nsIFileInputStream",
+ "init"
+);
+
+class MockResponse {
+ constructor(content) {
+ this.content = content;
+ }
+
+ async text() {
+ return this.content;
+ }
+
+ async json() {
+ return JSON.parse(this.content);
+ }
+}
+
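+// Decorator factory: points Normandy at `server` by overriding the
+// app.normandy.api_url preference, clears NormandyApi's index cache, passes
+// { serverUrl, mockPreferences, server } to the wrapped test, and stops the
+// server once the test finishes.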
+function withServer(server) {
+ return function (testFunction) {
+ return NormandyTestUtils.decorate(
+ NormandyTestUtils.withMockPreferences(),
+ async function inner({ mockPreferences, ...args }) {
+ const serverUrl = `http://localhost:${server.identity.primaryPort}`;
+ mockPreferences.set("app.normandy.api_url", `${serverUrl}/api/v1`);
+ NormandyApi.clearIndexCache();
+
+ try {
+ await testFunction({ ...args, serverUrl, mockPreferences, server });
+ } finally {
+ await new Promise(resolve => server.stop(resolve));
+ }
+ }
+ );
+ };
+}
+
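+// Serves a single .sjs script (such as query_server.sjs or cookie_server.sjs)
+// from the server root on a randomly assigned port.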
+function makeScriptServer(scriptPath) {
+ const server = new HttpServer();
+ server.registerContentType("sjs", "sjs");
+ server.registerFile("/", do_get_file(scriptPath));
+ server.start(-1);
+ return server;
+}
+
+function withScriptServer(scriptPath) {
+ return withServer(makeScriptServer(scriptPath));
+}
+
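+// Serves a directory of fixture files (mock_api/ by default, see
+// withMockApiServer below) and answers directory requests with that
+// directory's index.json, mimicking the self-describing Normandy API index.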
+function makeMockApiServer(directory) {
+ const server = new HttpServer();
+ server.registerDirectory("/", directory);
+
+ server.setIndexHandler(async function (request, response) {
+ response.processAsync();
+ const dir = request.getProperty("directory");
+ const index = dir.clone();
+ index.append("index.json");
+
+ if (!index.exists()) {
+ response.setStatusLine("1.1", 404, "Not Found");
+ response.write(`Cannot find path ${index.path}`);
+ response.finish();
+ return;
+ }
+
+ try {
+ const contents = await IOUtils.readUTF8(index.path);
+ response.write(contents);
+ } catch (e) {
+ response.setStatusLine("1.1", 500, "Server error");
+ response.write(e.toString());
+ } finally {
+ response.finish();
+ }
+ });
+
+ server.start(-1);
+ return server;
+}
+
+function withMockApiServer(apiName = "mock_api") {
+ return withServer(makeMockApiServer(do_get_file(apiName)));
+}
+
+const CryptoUtils = {
+ _getHashStringForCrypto(aCrypto) {
+ // return the two-digit hexadecimal code for a byte
+ let toHexString = charCode => ("0" + charCode.toString(16)).slice(-2);
+
+ // convert the binary hash data to a hex string.
+ let binary = aCrypto.finish(false);
+ let hash = Array.from(binary, c => toHexString(c.charCodeAt(0)));
+ return hash.join("").toLowerCase();
+ },
+
+ /**
+ * Get the computed hash for a given file
+ * @param {nsIFile} file The file to be hashed
+ * @param {string} [algorithm] The hashing algorithm to use
+ */
+ getFileHash(file, algorithm = "sha256") {
+ const crypto = CryptoHash(algorithm);
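+ // -1 for ioFlags and perm selects the platform defaults; the final
+ // argument leaves the nsIFileInputStream behavior flags unset.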
+ const fis = new FileInputStream(file, -1, -1, false);
+ crypto.updateFromStream(fis, file.fileSize);
+ const hash = this._getHashStringForCrypto(crypto);
+ fis.close();
+ return hash;
+ },
+};
diff --git a/toolkit/components/normandy/test/unit/xpcshell.ini b/toolkit/components/normandy/test/unit/xpcshell.ini
new file mode 100644
index 0000000000..e2ec476ce9
--- /dev/null
+++ b/toolkit/components/normandy/test/unit/xpcshell.ini
@@ -0,0 +1,17 @@
+[DEFAULT]
+head = head_xpc.js
+firefox-appdir = browser
+support-files =
+ mock_api/**
+ invalid_recipe_signature_api/**
+ query_server.sjs
+ echo_server.sjs
+ cookie_server.sjs
+ utils.js
+tags = normandy
+
+[test_Normandy.js]
+[test_PrefUtils.js]
+[test_addon_unenroll.js]
+[test_NormandyApi.js]
+[test_RecipeRunner.js]