From 8dd16259287f58f9273002717ec4d27e97127719 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Wed, 12 Jun 2024 07:43:14 +0200 Subject: Merging upstream version 127.0. Signed-off-by: Daniel Baumann --- .../components/backup/tests/browser/browser.toml | 7 + .../backup/tests/browser/browser_settings.js | 40 ++ browser/components/backup/tests/chrome/chrome.toml | 4 + .../backup/tests/chrome/test_backup_settings.html | 43 ++ .../backup/tests/marionette/http2-ca.pem | 18 + .../backup/tests/marionette/manifest.toml | 6 + .../backup/tests/marionette/test_backup.py | 713 +++++++++++++++++++++ .../backup/tests/xpcshell/data/test_xulstore.json | 1 - browser/components/backup/tests/xpcshell/head.js | 6 + .../tests/xpcshell/test_AddonsBackupResource.js | 416 ++++++++++++ .../backup/tests/xpcshell/test_BackupResource.js | 175 ++++- .../backup/tests/xpcshell/test_BackupService.js | 451 +++++++++++++ .../test_BackupService_takeMeasurements.js | 59 ++ .../tests/xpcshell/test_CookiesBackupResource.js | 142 ++++ .../test_CredentialsAndSecurityBackupResource.js | 215 +++++++ .../xpcshell/test_FormHistoryBackupResource.js | 146 +++++ .../tests/xpcshell/test_MiscDataBackupResource.js | 203 +++++- .../tests/xpcshell/test_PlacesBackupResource.js | 151 ++++- .../xpcshell/test_PreferencesBackupResource.js | 66 +- .../xpcshell/test_SessionStoreBackupResource.js | 209 ++++++ .../backup/tests/xpcshell/test_createBackup.js | 74 --- .../backup/tests/xpcshell/test_measurements.js | 577 ----------------- .../components/backup/tests/xpcshell/xpcshell.toml | 16 +- 23 files changed, 3070 insertions(+), 668 deletions(-) create mode 100644 browser/components/backup/tests/browser/browser.toml create mode 100644 browser/components/backup/tests/browser/browser_settings.js create mode 100644 browser/components/backup/tests/chrome/chrome.toml create mode 100644 browser/components/backup/tests/chrome/test_backup_settings.html create mode 100644 browser/components/backup/tests/marionette/http2-ca.pem 
create mode 100644 browser/components/backup/tests/marionette/manifest.toml create mode 100644 browser/components/backup/tests/marionette/test_backup.py create mode 100644 browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js create mode 100644 browser/components/backup/tests/xpcshell/test_BackupService.js create mode 100644 browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js create mode 100644 browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js create mode 100644 browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js create mode 100644 browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js create mode 100644 browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js delete mode 100644 browser/components/backup/tests/xpcshell/test_createBackup.js delete mode 100644 browser/components/backup/tests/xpcshell/test_measurements.js (limited to 'browser/components/backup/tests') diff --git a/browser/components/backup/tests/browser/browser.toml b/browser/components/backup/tests/browser/browser.toml new file mode 100644 index 0000000000..f222c3b825 --- /dev/null +++ b/browser/components/backup/tests/browser/browser.toml @@ -0,0 +1,7 @@ +[DEFAULT] +prefs = [ + "browser.backup.enabled=true", + "browser.backup.preferences.ui.enabled=true", +] + +["browser_settings.js"] diff --git a/browser/components/backup/tests/browser/browser_settings.js b/browser/components/backup/tests/browser/browser_settings.js new file mode 100644 index 0000000000..b33dbec7bd --- /dev/null +++ b/browser/components/backup/tests/browser/browser_settings.js @@ -0,0 +1,40 @@ +/* Any copyright is dedicated to the Public Domain. + https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +/** + * Tests that the section for controlling backup in about:preferences is + * visible, but can also be hidden via a pref. 
+ */ +add_task(async function test_preferences_visibility() { + await BrowserTestUtils.withNewTab("about:preferences", async browser => { + let backupSection = + browser.contentDocument.querySelector("#dataBackupGroup"); + Assert.ok(backupSection, "Found backup preferences section"); + + // Our mochitest-browser tests are configured to have the section visible + // by default. + Assert.ok( + BrowserTestUtils.isVisible(backupSection), + "Backup section is visible" + ); + }); + + await SpecialPowers.pushPrefEnv({ + set: [["browser.backup.preferences.ui.enabled", false]], + }); + + await BrowserTestUtils.withNewTab("about:preferences", async browser => { + let backupSection = + browser.contentDocument.querySelector("#dataBackupGroup"); + Assert.ok(backupSection, "Found backup preferences section"); + + Assert.ok( + BrowserTestUtils.isHidden(backupSection), + "Backup section is now hidden" + ); + }); + + await SpecialPowers.popPrefEnv(); +}); diff --git a/browser/components/backup/tests/chrome/chrome.toml b/browser/components/backup/tests/chrome/chrome.toml new file mode 100644 index 0000000000..b0c01b336f --- /dev/null +++ b/browser/components/backup/tests/chrome/chrome.toml @@ -0,0 +1,4 @@ +[DEFAULT] +skip-if = ["os == 'android'"] + +["test_backup_settings.html"] diff --git a/browser/components/backup/tests/chrome/test_backup_settings.html b/browser/components/backup/tests/chrome/test_backup_settings.html new file mode 100644 index 0000000000..3619f8a1f4 --- /dev/null +++ b/browser/components/backup/tests/chrome/test_backup_settings.html @@ -0,0 +1,43 @@ + + + + + Tests for the BackupSettings component + + + + + + + +

+ +

+
+
diff --git a/browser/components/backup/tests/marionette/http2-ca.pem b/browser/components/backup/tests/marionette/http2-ca.pem
new file mode 100644
index 0000000000..ef5a801720
--- /dev/null
+++ b/browser/components/backup/tests/marionette/http2-ca.pem
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE-----
+MIIC1DCCAbygAwIBAgIURZvN7yVqFNwThGHASoy1OlOGvOMwDQYJKoZIhvcNAQEL
+BQAwGTEXMBUGA1UEAwwOIEhUVFAyIFRlc3QgQ0EwIhgPMjAxNzAxMDEwMDAwMDBa
+GA8yMDI3MDEwMTAwMDAwMFowGTEXMBUGA1UEAwwOIEhUVFAyIFRlc3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6iFGoRI4W1kH9braIBjYQPTwT
+2erkNUq07PVoV2wke8HHJajg2B+9sZwGm24ahvJr4q9adWtqZHEIeqVap0WH9xzV
+JJwCfs1D/B5p0DggKZOrIMNJ5Nu5TMJrbA7tFYIP8X6taRqx0wI6iypB7qdw4A8N
+jf1mCyuwJJKkfbmIYXmQsVeQPdI7xeC4SB+oN9OIQ+8nFthVt2Zaqn4CkC86exCA
+BiTMHGyXrZZhW7filhLAdTGjDJHdtMr3/K0dJdMJ77kXDqdo4bN7LyJvaeO0ipVh
+He4m1iWdq5EITjbLHCQELL8Wiy/l8Y+ZFzG4s/5JI/pyUcQx1QOs2hgKNe2NAgMB
+AAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBADyDiQnKjsvR
+NrOk0aqgJ8XgK/IgJXFLbAVivjBLwnJGEkwxrFtC14mpTrPuXw9AybhroMjinq4Y
+cNYTFuTE34k0fZEU8d60J/Tpfd1i0EB8+oUPuqOn+N29/LeHPAnkDJdOZye3w0U+
+StAI79WqUYQaKIG7qLnt60dQwBte12uvbuPaB3mREIfDXOKcjLBdZHL1waWjtzUX
+z2E91VIdpvJGfEfXC3fIe1uO9Jh/E9NVWci84+njkNsl+OyBfOJ8T+pV3SHfWedp
+Zbjwh6UTukIuc3mW0rS/qZOa2w3HQaO53BMbluo0w1+cscOepsATld2HHvSiHB+0
+K8SWFRHdBOU=
+-----END CERTIFICATE-----
diff --git a/browser/components/backup/tests/marionette/manifest.toml b/browser/components/backup/tests/marionette/manifest.toml
new file mode 100644
index 0000000000..2982adb693
--- /dev/null
+++ b/browser/components/backup/tests/marionette/manifest.toml
@@ -0,0 +1,6 @@
+[DEFAULT]
+run-if = ["buildapp == 'browser'"]
+prefs = ["browser.backup.enabled=true", "browser.backup.log=true"]
+
+["test_backup.py"]
+support-files = ["http2-ca.pem"]
diff --git a/browser/components/backup/tests/marionette/test_backup.py b/browser/components/backup/tests/marionette/test_backup.py
new file mode 100644
index 0000000000..3b11b50ae8
--- /dev/null
+++ b/browser/components/backup/tests/marionette/test_backup.py
@@ -0,0 +1,713 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import shutil
+import tempfile
+
+import mozfile
+from marionette_harness import MarionetteTestCase
+
+
+class BackupTest(MarionetteTestCase):
+    # This is the DB key that will be computed for the http2-ca.pem certificate
+    # that's included in a support-file for this test.
+    _cert_db_key = "AAAAAAAAAAAAAAAUAAAAG0Wbze8lahTcE4RhwEqMtTpThrzjMBkxFzAVBgNVBAMMDiBIVFRQMiBUZXN0IENB"
+
+    def setUp(self):
+        MarionetteTestCase.setUp(self)
+        # We need to quit the browser and restart with the browser.backup.log
+        # pref already set to true in order for it to be displayed.
+        self.marionette.quit()
+        self.marionette.instance.prefs = {
+            "browser.backup.log": True,
+        }
+        # Now restart the browser.
+        self.marionette.instance.switch_profile()
+        self.marionette.start_session()
+
+    def test_backup(self):
+        self.marionette.set_context("chrome")
+
+        self.add_test_cookie()
+        self.add_test_login()
+        self.add_test_certificate()
+        self.add_test_saved_address()
+        self.add_test_identity_credential()
+        self.add_test_form_history()
+        self.add_test_activity_stream_snippets_data()
+        self.add_test_protections_data()
+        self.add_test_bookmarks()
+        self.add_test_history()
+        self.add_test_preferences()
+        self.add_test_permissions()
+
+        resourceKeys = self.marionette.execute_script(
+            """
+          const DefaultBackupResources = ChromeUtils.importESModule("resource:///modules/backup/BackupResources.sys.mjs");
+          let resourceKeys = [];
+          for (const resourceName in DefaultBackupResources) {
+            let resource = DefaultBackupResources[resourceName];
+            resourceKeys.push(resource.key);
+          }
+          return resourceKeys;
+        """
+        )
+
+        originalStagingPath = self.marionette.execute_async_script(
+            """
+          const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+          let bs = BackupService.init();
+          if (!bs) {
+            throw new Error("Could not get initialized BackupService.");
+          }
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let { stagingPath } = await bs.createBackup();
+            if (!stagingPath) {
+              throw new Error("Could not create backup.");
+            }
+            return stagingPath;
+          })().then(outerResolve);
+        """
+        )
+
+        # When we switch over to the recovered profile, the Marionette framework
+        # will blow away the profile directory of the one that we created the
+        # backup on, which ruins our ability to do postRecovery work, since
+        # that relies on the prior profile sticking around. We work around this
+        # by moving the staging folder we got back to the OS temporary
+        # directory, and telling the recovery method to use that instead of the
+        # one from the profile directory.
+        stagingPath = os.path.join(tempfile.gettempdir(), "staging-test")
+        # Delete the destination folder if it exists already
+        shutil.rmtree(stagingPath, ignore_errors=True)
+        shutil.move(originalStagingPath, stagingPath)
+
+        # First, ensure that the staging path exists
+        self.assertTrue(os.path.exists(stagingPath))
+        # Now, ensure that the backup-manifest.json file exists within it.
+        manifestPath = os.path.join(stagingPath, "backup-manifest.json")
+        self.assertTrue(os.path.exists(manifestPath))
+
+        # For now, we just do a cursory check to ensure that for the resources
+        # that are listed in the manifest as having been backed up, that we
+        # have at least one file in their respective staging directories.
+        # We don't check the contents of the files, just that they exist.
+
+        # Read the JSON manifest file
+        with open(manifestPath, "r") as f:
+            manifest = json.load(f)
+
+        # Ensure that the manifest has a "resources" key
+        self.assertIn("resources", manifest)
+        resources = manifest["resources"]
+        self.assertTrue(isinstance(resources, dict))
+        self.assertTrue(len(resources) > 0)
+
+        # We don't have encryption capabilities wired up yet, so we'll check
+        # that all default resources are represented in the manifest.
+        self.assertEqual(len(resources), len(resourceKeys))
+        for resourceKey in resourceKeys:
+            self.assertIn(resourceKey, resources)
+
+        # Iterate the resources dict keys
+        for resourceKey in resources:
+            print("Checking resource: %s" % resourceKey)
+            # Ensure that there are staging directories created for each
+            # resource that was backed up
+            resourceStagingDir = os.path.join(stagingPath, resourceKey)
+            self.assertTrue(os.path.exists(resourceStagingDir))
+
+        # Start a brand new profile, one without any of the data we created or
+        # backed up. This is the one that we'll be starting recovery from.
+        self.marionette.quit()
+        self.marionette.instance.profile = None
+        self.marionette.start_session()
+        self.marionette.set_context("chrome")
+
+        # Recover the created backup into a new profile directory. Also get out
+        # the client ID of this profile, because we're going to want to make
+        # sure that this client ID is inherited by the recovered profile.
+        [
+            newProfileName,
+            newProfilePath,
+            expectedClientID,
+        ] = self.marionette.execute_async_script(
+            """
+          const { ClientID } = ChromeUtils.importESModule("resource://gre/modules/ClientID.sys.mjs");
+          const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+          let bs = BackupService.get();
+          if (!bs) {
+            throw new Error("Could not get initialized BackupService.");
+          }
+
+          let [stagingPath, outerResolve] = arguments;
+          (async () => {
+            let newProfileRootPath = await IOUtils.createUniqueDirectory(
+              PathUtils.tempDir,
+              "recoverFromBackupTest-newProfileRoot"
+            );
+            let newProfile = await bs.recoverFromBackup(stagingPath, false, newProfileRootPath)
+            if (!newProfile) {
+              throw new Error("Could not create recovery profile.");
+            }
+
+            let expectedClientID = await ClientID.getClientID();
+
+            return [newProfile.name, newProfile.rootDir.path, expectedClientID];
+          })().then(outerResolve);
+        """,
+            script_args=[stagingPath],
+        )
+
+        print("Recovery name: %s" % newProfileName)
+        print("Recovery path: %s" % newProfilePath)
+        print("Expected clientID: %s" % expectedClientID)
+
+        self.marionette.quit()
+        originalProfile = self.marionette.instance.profile
+        self.marionette.instance.profile = newProfilePath
+        self.marionette.start_session()
+        self.marionette.set_context("chrome")
+
+        # Ensure that all postRecovery actions have completed.
+        self.marionette.execute_async_script(
+            """
+          const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+          let bs = BackupService.get();
+          if (!bs) {
+            throw new Error("Could not get initialized BackupService.");
+          }
+
+          let [outerResolve] = arguments;
+          (async () => {
+            await bs.postRecoveryComplete;
+          })().then(outerResolve);
+        """
+        )
+
+        self.verify_recovered_test_cookie()
+        self.verify_recovered_test_login()
+        self.verify_recovered_test_certificate()
+        self.verify_recovered_saved_address()
+        self.verify_recovered_identity_credential()
+        self.verify_recovered_form_history()
+        self.verify_recovered_activity_stream_snippets_data()
+        self.verify_recovered_protections_data()
+        self.verify_recovered_bookmarks()
+        self.verify_recovered_history()
+        self.verify_recovered_preferences()
+        self.verify_recovered_permissions()
+
+        # Now also ensure that the recovered profile inherited the client ID
+        # from the profile that initiated recovery.
+        recoveredClientID = self.marionette.execute_async_script(
+            """
+          const { ClientID } = ChromeUtils.importESModule("resource://gre/modules/ClientID.sys.mjs");
+          let [outerResolve] = arguments;
+          (async () => {
+            return ClientID.getClientID();
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(recoveredClientID, expectedClientID)
+
+        # Try not to pollute the profile list by getting rid of the one we just
+        # created.
+        self.marionette.quit()
+        self.marionette.instance.profile = originalProfile
+        self.marionette.start_session()
+        self.marionette.set_context("chrome")
+        self.marionette.execute_script(
+            """
+          let newProfileName = arguments[0];
+          let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+            Ci.nsIToolkitProfileService
+          );
+          let profile = profileSvc.getProfileByName(newProfileName);
+          profile.remove(true);
+          profileSvc.flush();
+        """,
+            script_args=[newProfileName],
+        )
+
+        # Cleanup the staging path that we moved
+        mozfile.remove(stagingPath)
+
+    def add_test_cookie(self):
+        self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            // We'll just add a single cookie, and then make sure that it shows
+            // up on the other side.
+            Services.cookies.removeAll();
+            Services.cookies.add(
+              ".example.com",
+              "/",
+              "first",
+              "one",
+              false,
+              false,
+              false,
+              Date.now() / 1000 + 1,
+              {},
+              Ci.nsICookie.SAMESITE_NONE,
+              Ci.nsICookie.SCHEME_HTTP
+            );
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_test_cookie(self):
+        cookiesLength = self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            let cookies = Services.cookies.getCookiesFromHost("example.com", {});
+            return cookies.length;
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(cookiesLength, 1)
+
+    def add_test_login(self):
+        self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            // Let's start with adding a single password
+            Services.logins.removeAllLogins();
+
+            const nsLoginInfo = new Components.Constructor(
+              "@mozilla.org/login-manager/loginInfo;1",
+              Ci.nsILoginInfo,
+              "init"
+            );
+
+            const login1 = new nsLoginInfo(
+              "https://example.com",
+              "https://example.com",
+              null,
+              "notifyu1",
+              "notifyp1",
+              "user",
+              "pass"
+            );
+            await Services.logins.addLoginAsync(login1);
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_test_login(self):
+        loginsLength = self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            let logins = await Services.logins.searchLoginsAsync({
+              origin: "https://example.com",
+            });
+            return logins.length;
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(loginsLength, 1)
+
+    def add_test_certificate(self):
+        certPath = os.path.join(os.path.dirname(__file__), "http2-ca.pem")
+        self.marionette.execute_async_script(
+            """
+          let [certPath, certDbKey, outerResolve] = arguments;
+          (async () => {
+            const { NetUtil } = ChromeUtils.importESModule(
+              "resource://gre/modules/NetUtil.sys.mjs"
+            );
+
+            let certDb = Cc["@mozilla.org/security/x509certdb;1"].getService(
+              Ci.nsIX509CertDB
+            );
+
+            if (certDb.findCertByDBKey(certDbKey)) {
+              throw new Error("Should not have this certificate yet!");
+            }
+
+            let certFile = await IOUtils.getFile(certPath);
+            let fstream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+              Ci.nsIFileInputStream
+            );
+            fstream.init(certFile, -1, 0, 0);
+            let data = NetUtil.readInputStreamToString(fstream, fstream.available());
+            fstream.close();
+
+            let pem = data.replace(/-----BEGIN CERTIFICATE-----/, "")
+                          .replace(/-----END CERTIFICATE-----/, "")
+                          .replace(/[\\r\\n]/g, "");
+            let cert = certDb.addCertFromBase64(pem, "CTu,u,u");
+
+            if (cert.dbKey != certDbKey) {
+              throw new Error("The inserted certificate DB key is unexpected.");
+            }
+          })().then(outerResolve);
+        """,
+            script_args=[certPath, self._cert_db_key],
+        )
+
+    def verify_recovered_test_certificate(self):
+        certExists = self.marionette.execute_async_script(
+            """
+          let [certDbKey, outerResolve] = arguments;
+          (async () => {
+            let certDb = Cc["@mozilla.org/security/x509certdb;1"].getService(
+              Ci.nsIX509CertDB
+            );
+            return certDb.findCertByDBKey(certDbKey) != null;
+          })().then(outerResolve);
+        """,
+            script_args=[self._cert_db_key],
+        )
+        self.assertTrue(certExists)
+
+    def add_test_saved_address(self):
+        self.marionette.execute_async_script(
+            """
+          const { formAutofillStorage } = ChromeUtils.importESModule(
+            "resource://autofill/FormAutofillStorage.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            const TEST_ADDRESS_1 = {
+              "given-name": "John",
+              "additional-name": "R.",
+              "family-name": "Smith",
+              organization: "World Wide Web Consortium",
+              "street-address": "32 Vassar Street\\\nMIT Room 32-G524",
+              "address-level2": "Cambridge",
+              "address-level1": "MA",
+              "postal-code": "02139",
+              country: "US",
+              tel: "+15195555555",
+              email: "user@example.com",
+            };
+            await formAutofillStorage.initialize();
+            formAutofillStorage.addresses.removeAll();
+            await formAutofillStorage.addresses.add(TEST_ADDRESS_1);
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_saved_address(self):
+        addressesLength = self.marionette.execute_async_script(
+            """
+          const { formAutofillStorage } = ChromeUtils.importESModule(
+            "resource://autofill/FormAutofillStorage.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            await formAutofillStorage.initialize();
+            let addresses = await formAutofillStorage.addresses.getAll();
+            return addresses.length;
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(addressesLength, 1)
+
+    def add_test_identity_credential(self):
+        self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            let service = Cc["@mozilla.org/browser/identity-credential-storage-service;1"]
+                            .getService(Ci.nsIIdentityCredentialStorageService);
+            service.clear();
+
+            let testPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+              Services.io.newURI("https://test.com/"),
+              {}
+            );
+            let idpPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+              Services.io.newURI("https://idp-test.com/"),
+              {}
+            );
+
+            service.setState(
+              testPrincipal,
+              idpPrincipal,
+              "ID",
+              true,
+              true
+            );
+
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_identity_credential(self):
+        [registered, allowLogout] = self.marionette.execute_async_script(
+            """
+          let [outerResolve] = arguments;
+          (async () => {
+            let service = Cc["@mozilla.org/browser/identity-credential-storage-service;1"]
+                            .getService(Ci.nsIIdentityCredentialStorageService);
+
+            let testPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+              Services.io.newURI("https://test.com/"),
+              {}
+            );
+            let idpPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+              Services.io.newURI("https://idp-test.com/"),
+              {}
+            );
+
+            let registered = {};
+            let allowLogout = {};
+
+            service.getState(
+              testPrincipal,
+              idpPrincipal,
+              "ID",
+              registered,
+              allowLogout
+            );
+
+            return [registered.value, allowLogout.value];
+          })().then(outerResolve);
+        """
+        )
+        self.assertTrue(registered)
+        self.assertTrue(allowLogout)
+
+    def add_test_form_history(self):
+        self.marionette.execute_async_script(
+            """
+          const { FormHistory } = ChromeUtils.importESModule(
+            "resource://gre/modules/FormHistory.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            await FormHistory.update({
+              op: "add",
+              fieldname: "some-test-field",
+              value: "I was recovered!",
+              timesUsed: 1,
+              firstUsed: 0,
+              lastUsed: 0,
+            });
+
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_form_history(self):
+        formHistoryResultsLength = self.marionette.execute_async_script(
+            """
+          const { FormHistory } = ChromeUtils.importESModule(
+            "resource://gre/modules/FormHistory.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let results = await FormHistory.search(
+              ["guid"],
+              { fieldname: "some-test-field" }
+            );
+            return results.length;
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(formHistoryResultsLength, 1)
+
+    def add_test_activity_stream_snippets_data(self):
+        self.marionette.execute_async_script(
+            """
+          const { ActivityStreamStorage } = ChromeUtils.importESModule(
+            "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs",
+          );
+          const SNIPPETS_TABLE_NAME = "snippets";
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let storage = new ActivityStreamStorage({
+              storeNames: [SNIPPETS_TABLE_NAME],
+            });
+            let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+            await snippetsTable.set("backup-test", "some-test-value");
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_activity_stream_snippets_data(self):
+        snippetsResult = self.marionette.execute_async_script(
+            """
+          const { ActivityStreamStorage } = ChromeUtils.importESModule(
+            "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs",
+          );
+          const SNIPPETS_TABLE_NAME = "snippets";
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let storage = new ActivityStreamStorage({
+              storeNames: [SNIPPETS_TABLE_NAME],
+            });
+            let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+            return await snippetsTable.get("backup-test");
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(snippetsResult, "some-test-value")
+
+    def add_test_protections_data(self):
+        self.marionette.execute_async_script(
+            """
+          const TrackingDBService = Cc["@mozilla.org/tracking-db-service;1"]
+                                      .getService(Ci.nsITrackingDBService);
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let entry = {
+              "https://test.com": [
+                [Ci.nsIWebProgressListener.STATE_BLOCKED_TRACKING_CONTENT, true, 1],
+              ],
+            };
+            await TrackingDBService.clearAll();
+            await TrackingDBService.saveEvents(JSON.stringify(entry));
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_protections_data(self):
+        eventsSum = self.marionette.execute_async_script(
+            """
+          const TrackingDBService = Cc["@mozilla.org/tracking-db-service;1"]
+                                      .getService(Ci.nsITrackingDBService);
+
+          let [outerResolve] = arguments;
+          (async () => {
+            return TrackingDBService.sumAllEvents();
+          })().then(outerResolve);
+        """
+        )
+        self.assertEqual(eventsSum, 1)
+
+    def add_test_bookmarks(self):
+        self.marionette.execute_async_script(
+            """
+          const { PlacesUtils } = ChromeUtils.importESModule(
+            "resource://gre/modules/PlacesUtils.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            await PlacesUtils.bookmarks.eraseEverything();
+            await PlacesUtils.bookmarks.insert({
+              parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+              title: "Some test page",
+              url: Services.io.newURI("https://www.backup.test/"),
+            });
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_bookmarks(self):
+        bookmarkExists = self.marionette.execute_async_script(
+            """
+          const { PlacesUtils } = ChromeUtils.importESModule(
+            "resource://gre/modules/PlacesUtils.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let url = Services.io.newURI("https://www.backup.test/");
+            let bookmark = await PlacesUtils.bookmarks.fetch({ url });
+            return bookmark != null;
+          })().then(outerResolve);
+        """
+        )
+        self.assertTrue(bookmarkExists)
+
+    def add_test_history(self):
+        self.marionette.execute_async_script(
+            """
+          const { PlacesUtils } = ChromeUtils.importESModule(
+            "resource://gre/modules/PlacesUtils.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            await PlacesUtils.history.clear();
+
+            let entry = {
+              url: "http://my-restored-history.com",
+              visits: [{ transition: PlacesUtils.history.TRANSITION_LINK }],
+            };
+
+            await PlacesUtils.history.insertMany([entry]);
+          })().then(outerResolve);
+        """
+        )
+
+    def verify_recovered_history(self):
+        historyExists = self.marionette.execute_async_script(
+            """
+          const { PlacesUtils } = ChromeUtils.importESModule(
+            "resource://gre/modules/PlacesUtils.sys.mjs"
+          );
+
+          let [outerResolve] = arguments;
+          (async () => {
+            let entry = await PlacesUtils.history.fetch("http://my-restored-history.com");
+            return entry != null;
+          })().then(outerResolve);
+        """
+        )
+        self.assertTrue(historyExists)
+
+    def add_test_preferences(self):
+        self.marionette.execute_script(
+            """
+          Services.prefs.setBoolPref("test-pref-for-backup", true)
+        """
+        )
+
+    def verify_recovered_preferences(self):
+        prefExists = self.marionette.execute_script(
+            """
+          return Services.prefs.getBoolPref("test-pref-for-backup", false);
+        """
+        )
+        self.assertTrue(prefExists)
+
+    def add_test_permissions(self):
+        self.marionette.execute_script(
+            """
+          let principal = Services.scriptSecurityManager.createContentPrincipalFromOrigin(
+            "https://test-permission-site.com"
+          );
+          Services.perms.addFromPrincipal(
+            principal,
+            "desktop-notification",
+            Services.perms.ALLOW_ACTION
+          );
+        """
+        )
+
+    def verify_recovered_permissions(self):
+        permissionExists = self.marionette.execute_script(
+            """
+          let principal = Services.scriptSecurityManager.createContentPrincipalFromOrigin(
+            "https://test-permission-site.com"
+          );
+          let perms = Services.perms.getAllForPrincipal(principal);
+          if (perms.length != 1) {
+            throw new Error("Got an unexpected number of permissions");
+          }
+          return perms[0].type == "desktop-notification"
+        """
+        )
+        self.assertTrue(permissionExists)
diff --git a/browser/components/backup/tests/xpcshell/data/test_xulstore.json b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
index 0d0890ab16..e4ae6f1f66 100644
--- a/browser/components/backup/tests/xpcshell/data/test_xulstore.json
+++ b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
@@ -9,7 +9,6 @@
       "sizemode": "normal"
     },
     "sidebar-box": {
-      "sidebarcommand": "viewBookmarksSidebar",
       "width": "323",
       "style": "width: 323px;"
     },
diff --git a/browser/components/backup/tests/xpcshell/head.js b/browser/components/backup/tests/xpcshell/head.js
index 2402870a13..e5ed32fb63 100644
--- a/browser/components/backup/tests/xpcshell/head.js
+++ b/browser/components/backup/tests/xpcshell/head.js
@@ -50,6 +50,9 @@ class FakeBackupResource2 extends BackupResource {
   static get requiresEncryption() {
     return true;
   }
+  static get priority() {
+    return 1;
+  }
 }
 
 /**
@@ -62,6 +65,9 @@ class FakeBackupResource3 extends BackupResource {
   static get requiresEncryption() {
     return false;
   }
+  static get priority() {
+    return 2;
+  }
 }
 
 /**
diff --git a/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
new file mode 100644
index 0000000000..d1c47ecdb0
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
@@ -0,0 +1,416 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { AddonsBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/AddonsBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the size of all the addons & extensions data.
+ */
+add_task(async function test_measure() {
+  Services.fog.testResetFOG();
+  Services.telemetry.clearScalars();
+
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500;
+  const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000;
+  const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200;
+
+  let tempDir = PathUtils.tempDir;
+
+  // Create extensions json files (all the same size).
+  const extensionsFilePath = PathUtils.join(tempDir, "extensions.json");
+  await createKilobyteSizedFile(
+    extensionsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const extensionSettingsFilePath = PathUtils.join(
+    tempDir,
+    "extension-settings.json"
+  );
+  await createKilobyteSizedFile(
+    extensionSettingsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const extensionsPrefsFilePath = PathUtils.join(
+    tempDir,
+    "extension-preferences.json"
+  );
+  await createKilobyteSizedFile(
+    extensionsPrefsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4");
+  await createKilobyteSizedFile(
+    addonStartupFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+
+  // Create the extension store permissions data file.
+  let extensionStorePermissionsDataSize = PathUtils.join(
+    tempDir,
+    "extension-store-permissions",
+    "data.safe.bin"
+  );
+  await createKilobyteSizedFile(
+    extensionStorePermissionsDataSize,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE
+  );
+
+  // Create the storage sync database file.
+  let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite");
+  await createKilobyteSizedFile(
+    storageSyncPath,
+    EXPECTED_KILOBYTES_FOR_STORAGE_SYNC
+  );
+
+  // Create the extensions directory with XPI files.
+  let extensionsXPIAPath = PathUtils.join(
+    tempDir,
+    "extensions",
+    "extension-b.xpi"
+  );
+  let extensionsXPIBPath = PathUtils.join(
+    tempDir,
+    "extensions",
+    "extension-a.xpi"
+  );
+  await createKilobyteSizedFile(
+    extensionsXPIAPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A
+  );
+  await createKilobyteSizedFile(
+    extensionsXPIBPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B
+  );
+  // Should be ignored.
+  let extensionsXPIStagedPath = PathUtils.join(
+    tempDir,
+    "extensions",
+    "staged",
+    "staged-test-extension.xpi"
+  );
+  let extensionsXPITrashPath = PathUtils.join(
+    tempDir,
+    "extensions",
+    "trash",
+    "trashed-test-extension.xpi"
+  );
+  let extensionsXPIUnpackedPath = PathUtils.join(
+    tempDir,
+    "extensions",
+    "unpacked-extension.xpi",
+    "manifest.json"
+  );
+  await createKilobyteSizedFile(
+    extensionsXPIStagedPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+  );
+  await createKilobyteSizedFile(
+    extensionsXPITrashPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+  );
+  await createKilobyteSizedFile(
+    extensionsXPIUnpackedPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+  );
+
+  // Create the browser extension data directory.
+  let browserExtensionDataPath = PathUtils.join(
+    tempDir,
+    "browser-extension-data",
+    "test-file"
+  );
+  await createKilobyteSizedFile(
+    browserExtensionDataPath,
+    EXPECTED_KILOBYTES_FOR_EXTENSION_DATA
+  );
+
+  // Create the extensions storage directory.
+  let extensionsStoragePath = PathUtils.join(
+    tempDir,
+    "storage",
+    "default",
+    "moz-extension+++test-extension-id",
+    "idb",
+    "data.sqlite"
+  );
+  // Other storage files that should not be counted.
+  let otherStoragePath = PathUtils.join(
+    tempDir,
+    "storage",
+    "default",
+    "https+++accounts.firefox.com",
+    "ls",
+    "data.sqlite"
+  );
+
+  await createKilobyteSizedFile(
+    extensionsStoragePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
+  );
+  await createKilobyteSizedFile(
+    otherStoragePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
+  );
+
+  // Measure all the extensions data.
+  let extensionsBackupResource = new AddonsBackupResource();
+  await extensionsBackupResource.measure(tempDir);
+
+  let extensionsJsonSizeMeasurement =
+    Glean.browserBackup.extensionsJsonSize.testGetValue();
+  Assert.equal(
+    extensionsJsonSizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files.
+    "Should have collected the correct measurement of the total size of all extensions JSON files"
+  );
+
+  let extensionStorePermissionsDataSizeMeasurement =
+    Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
+  Assert.equal(
+    extensionStorePermissionsDataSizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE,
+    "Should have collected the correct measurement of the size of the extension store permissions data"
+  );
+
+  let storageSyncSizeMeasurement =
+    Glean.browserBackup.storageSyncSize.testGetValue();
+  Assert.equal(
+    storageSyncSizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_STORAGE_SYNC,
+    "Should have collected the correct measurement of the size of the storage sync database"
+  );
+
+  let extensionsXPIDirectorySizeMeasurement =
+    Glean.browserBackup.extensionsXpiDirectorySize.testGetValue();
+  Assert.equal(
+    extensionsXPIDirectorySizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY,
+    "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory"
+  );
+
+  let browserExtensionDataSizeMeasurement =
+    Glean.browserBackup.browserExtensionDataSize.testGetValue();
+  Assert.equal(
+    browserExtensionDataSizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_EXTENSION_DATA,
+    "Should have collected the correct measurement of the size of the browser extension data directory"
+  );
+
+  let extensionsStorageSizeMeasurement =
+    Glean.browserBackup.extensionsStorageSize.testGetValue();
+  Assert.equal(
+    extensionsStorageSizeMeasurement,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE,
+    "Should have collected the correct measurement of all the extensions storage"
+  );
+
+  // Compare glean vs telemetry measurements
+  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.extensions_json_size",
+    extensionsJsonSizeMeasurement,
+    "Glean and telemetry measurements for extensions JSON should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.extension_store_permissions_data_size",
+    extensionStorePermissionsDataSizeMeasurement,
+    "Glean and telemetry measurements for extension store permissions data should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.storage_sync_size",
+    storageSyncSizeMeasurement,
+    "Glean and telemetry measurements for storage sync database should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.extensions_xpi_directory_size",
+    extensionsXPIDirectorySizeMeasurement,
+    "Glean and telemetry measurements for extensions directory should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.browser_extension_data_size",
+    browserExtensionDataSizeMeasurement,
+    "Glean and telemetry measurements for browser extension data should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.extensions_storage_size",
+    extensionsStorageSizeMeasurement,
+    "Glean and telemetry measurements for extensions storage should be equal"
+  );
+
+  await maybeRemovePath(tempDir);
+});
+
+/**
+ * Tests that we can handle the extension store permissions data
+ * and moz-extension IndexedDB databases not existing.
+ */
+add_task(async function test_measure_missing_data() {
+  Services.fog.testResetFOG();
+
+  let tempDir = PathUtils.tempDir;
+
+  let extensionsBackupResource = new AddonsBackupResource();
+  await extensionsBackupResource.measure(tempDir);
+
+  let extensionStorePermissionsDataSizeMeasurement =
+    Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
+  Assert.equal(
+    extensionStorePermissionsDataSizeMeasurement,
+    null,
+    "Should NOT have collected a measurement for the missing permissions data"
+  );
+
+  let extensionsStorageSizeMeasurement =
+    Glean.browserBackup.extensionsStorageSize.testGetValue();
+  Assert.equal(
+    extensionsStorageSizeMeasurement,
+    null,
+    "Should NOT have collected a measurement for the missing storage data"
+  );
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+  let sandbox = sinon.createSandbox();
+
+  let addonsBackupResource = new AddonsBackupResource();
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "AddonsBackupResource-source-test"
+  );
+  let stagingPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "AddonsBackupResource-staging-test"
+  );
+
+  const simpleCopyFiles = [
+    { path: "extensions.json" },
+    { path: "extension-settings.json" },
+    { path: "extension-preferences.json" },
+    { path: "addonStartup.json.lz4" },
+    {
+      path: [
+        "browser-extension-data",
+        "{11aa1234-f111-1234-abcd-a9b8c7654d32}",
+      ],
+    },
+    { path: ["extension-store-permissions", "data.safe.bin"] },
+    { path: ["extensions", "{11aa1234-f111-1234-abcd-a9b8c7654d32}.xpi"] },
+  ];
+  await createTestFiles(sourcePath, simpleCopyFiles);
+
+  const junkFiles = [{ path: ["extensions", "junk"] }];
+  await createTestFiles(sourcePath, junkFiles);
+
+  // Create a fake storage-sync-v2 database file. We don't expect this to
+  // be copied to the staging directory in this test due to our stubbing
+  // of the backup method, so we don't include it in `simpleCopyFiles`.
+  await createTestFiles(sourcePath, [{ path: "storage-sync-v2.sqlite" }]);
+
+  let fakeConnection = {
+    backup: sandbox.stub().resolves(true),
+    close: sandbox.stub().resolves(true),
+  };
+  sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+  let manifestEntry = await addonsBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "AddonsBackupResource.backup should return null as its ManifestEntry"
+  );
+
+  await assertFilesExist(stagingPath, simpleCopyFiles);
+
+  let junkFile = PathUtils.join(stagingPath, "extensions", "junk");
+  Assert.equal(
+    await IOUtils.exists(junkFile),
+    false,
+    `${junkFile} should not exist in the staging folder`
+  );
+
+  // Make sure storage-sync-v2 database is backed up.
+  Assert.ok(
+    fakeConnection.backup.calledOnce,
+    "Called backup the expected number of times for all connections"
+  );
+  Assert.ok(
+    fakeConnection.backup.calledWith(
+      PathUtils.join(stagingPath, "storage-sync-v2.sqlite")
+    ),
+    "Called backup on the storage-sync-v2 Sqlite connection"
+  );
+
+  await maybeRemovePath(stagingPath);
+  await maybeRemovePath(sourcePath);
+
+  sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let addonsBackupResource = new AddonsBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "addonsBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "addonsBackupResource-test-profile"
+  );
+
+  const files = [
+    { path: "extensions.json" },
+    { path: "extension-settings.json" },
+    { path: "extension-preferences.json" },
+    { path: "addonStartup.json.lz4" },
+    { path: "storage-sync-v2.sqlite" },
+    { path: ["browser-extension-data", "addon@darkreader.org.xpi", "data"] },
+    { path: ["extensions", "addon@darkreader.org.xpi"] },
+    { path: ["extension-store-permissions", "data.safe.bin"] },
+  ];
+  await createTestFiles(recoveryPath, files);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await addonsBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "AddonsBackupResource.recover should return null as its post " +
+      "recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, files);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupResource.js b/browser/components/backup/tests/xpcshell/test_BackupResource.js
index 6623f4cd77..42cda918f9 100644
--- a/browser/components/backup/tests/xpcshell/test_BackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_BackupResource.js
@@ -31,7 +31,8 @@ add_task(async function test_getFileSize() {
 });
 
 /**
- * Tests that BackupService.getDirectorySize will get the total size of all the files in a directory and it's children in kilobytes.
+ * Tests that BackupService.getDirectorySize will get the total size of all the
+ * files in a directory and it's children in kilobytes.
  */
 add_task(async function test_getDirectorySize() {
   let file = do_get_file("data/test_xulstore.json");
@@ -75,3 +76,175 @@ add_task(async function test_bytesToFuzzyKilobytes() {
 
   Assert.equal(smallSize, 1, "Sizes under 10 kilobytes return 1 kilobyte");
 });
+
+/**
+ * Tests that BackupResource.copySqliteDatabases will call `backup` on a new
+ * read-only connection on each database file.
+ */
+add_task(async function test_copySqliteDatabases() {
+  let sandbox = sinon.createSandbox();
+  const SQLITE_PAGES_PER_STEP_PREF = "browser.backup.sqlite.pages_per_step";
+  const SQLITE_STEP_DELAY_MS_PREF = "browser.backup.sqlite.step_delay_ms";
+  const DEFAULT_SQLITE_PAGES_PER_STEP = Services.prefs.getIntPref(
+    SQLITE_PAGES_PER_STEP_PREF
+  );
+  const DEFAULT_SQLITE_STEP_DELAY_MS = Services.prefs.getIntPref(
+    SQLITE_STEP_DELAY_MS_PREF
+  );
+
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "BackupResource-source-test"
+  );
+  let destPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "BackupResource-dest-test"
+  );
+  let pretendDatabases = ["places.sqlite", "favicons.sqlite"];
+  await createTestFiles(
+    sourcePath,
+    pretendDatabases.map(f => ({ path: f }))
+  );
+
+  let fakeConnection = {
+    backup: sandbox.stub().resolves(true),
+    close: sandbox.stub().resolves(true),
+  };
+  sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+  await BackupResource.copySqliteDatabases(
+    sourcePath,
+    destPath,
+    pretendDatabases
+  );
+
+  Assert.ok(
+    Sqlite.openConnection.calledTwice,
+    "Sqlite.openConnection called twice"
+  );
+  Assert.ok(
+    Sqlite.openConnection.firstCall.calledWith({
+      path: PathUtils.join(sourcePath, "places.sqlite"),
+      readOnly: true,
+    }),
+    "openConnection called with places.sqlite as read-only"
+  );
+  Assert.ok(
+    Sqlite.openConnection.secondCall.calledWith({
+      path: PathUtils.join(sourcePath, "favicons.sqlite"),
+      readOnly: true,
+    }),
+    "openConnection called with favicons.sqlite as read-only"
+  );
+
+  Assert.ok(
+    fakeConnection.backup.calledTwice,
+    "backup on an Sqlite connection called twice"
+  );
+  Assert.ok(
+    fakeConnection.backup.firstCall.calledWith(
+      PathUtils.join(destPath, "places.sqlite"),
+      DEFAULT_SQLITE_PAGES_PER_STEP,
+      DEFAULT_SQLITE_STEP_DELAY_MS
+    ),
+    "backup called with places.sqlite to the destination path with the right " +
+      "pages per step and step delay"
+  );
+  Assert.ok(
+    fakeConnection.backup.secondCall.calledWith(
+      PathUtils.join(destPath, "favicons.sqlite"),
+      DEFAULT_SQLITE_PAGES_PER_STEP,
+      DEFAULT_SQLITE_STEP_DELAY_MS
+    ),
+    "backup called with favicons.sqlite to the destination path with the " +
+      "right pages per step and step delay"
+  );
+
+  Assert.ok(
+    fakeConnection.close.calledTwice,
+    "close on an Sqlite connection called twice"
+  );
+
+  // Now check that we can override the default pages per step and step delay.
+  fakeConnection.backup.resetHistory();
+  const NEW_SQLITE_PAGES_PER_STEP = 10;
+  const NEW_SQLITE_STEP_DELAY_MS = 500;
+  Services.prefs.setIntPref(
+    SQLITE_PAGES_PER_STEP_PREF,
+    NEW_SQLITE_PAGES_PER_STEP
+  );
+  Services.prefs.setIntPref(
+    SQLITE_STEP_DELAY_MS_PREF,
+    NEW_SQLITE_STEP_DELAY_MS
+  );
+  await BackupResource.copySqliteDatabases(
+    sourcePath,
+    destPath,
+    pretendDatabases
+  );
+  Assert.ok(
+    fakeConnection.backup.calledTwice,
+    "backup on an Sqlite connection called twice"
+  );
+  Assert.ok(
+    fakeConnection.backup.firstCall.calledWith(
+      PathUtils.join(destPath, "places.sqlite"),
+      NEW_SQLITE_PAGES_PER_STEP,
+      NEW_SQLITE_STEP_DELAY_MS
+    ),
+    "backup called with places.sqlite to the destination path with the right " +
+      "pages per step and step delay"
+  );
+  Assert.ok(
+    fakeConnection.backup.secondCall.calledWith(
+      PathUtils.join(destPath, "favicons.sqlite"),
+      NEW_SQLITE_PAGES_PER_STEP,
+      NEW_SQLITE_STEP_DELAY_MS
+    ),
+    "backup called with favicons.sqlite to the destination path with the " +
+      "right pages per step and step delay"
+  );
+
+  await maybeRemovePath(sourcePath);
+  await maybeRemovePath(destPath);
+  sandbox.restore();
+});
+
+/**
+ * Tests that BackupResource.copyFiles will copy files from one directory to
+ * another.
+ */
+add_task(async function test_copyFiles() {
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "BackupResource-source-test"
+  );
+  let destPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "BackupResource-dest-test"
+  );
+
+  const testFiles = [
+    { path: "file1.txt" },
+    { path: ["some", "nested", "file", "file2.txt"] },
+    { path: "file3.txt" },
+  ];
+
+  await createTestFiles(sourcePath, testFiles);
+
+  await BackupResource.copyFiles(sourcePath, destPath, [
+    "file1.txt",
+    "some",
+    "file3.txt",
+    "does-not-exist.txt",
+  ]);
+
+  await assertFilesExist(destPath, testFiles);
+  Assert.ok(
+    !(await IOUtils.exists(PathUtils.join(destPath, "does-not-exist.txt"))),
+    "does-not-exist.txt wasn't somehow written to."
+  );
+
+  await maybeRemovePath(sourcePath);
+  await maybeRemovePath(destPath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupService.js b/browser/components/backup/tests/xpcshell/test_BackupService.js
new file mode 100644
index 0000000000..33fb9fbb99
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_BackupService.js
@@ -0,0 +1,451 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { AppConstants } = ChromeUtils.importESModule(
+  "resource://gre/modules/AppConstants.sys.mjs"
+);
+const { JsonSchemaValidator } = ChromeUtils.importESModule(
+  "resource://gre/modules/components-utils/JsonSchemaValidator.sys.mjs"
+);
+const { UIState } = ChromeUtils.importESModule(
+  "resource://services-sync/UIState.sys.mjs"
+);
+const { ClientID } = ChromeUtils.importESModule(
+  "resource://gre/modules/ClientID.sys.mjs"
+);
+
+add_setup(function () {
+  // Much of this setup is copied from toolkit/profile/xpcshell/head.js. It is
+  // needed in order to put the xpcshell test environment into the state where
+  // it thinks its profile is the one pointed at by
+  // nsIToolkitProfileService.currentProfile.
+  let gProfD = do_get_profile();
+  let gDataHome = gProfD.clone();
+  gDataHome.append("data");
+  gDataHome.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755);
+  let gDataHomeLocal = gProfD.clone();
+  gDataHomeLocal.append("local");
+  gDataHomeLocal.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755);
+
+  let xreDirProvider = Cc["@mozilla.org/xre/directory-provider;1"].getService(
+    Ci.nsIXREDirProvider
+  );
+  xreDirProvider.setUserDataDirectory(gDataHome, false);
+  xreDirProvider.setUserDataDirectory(gDataHomeLocal, true);
+
+  let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+    Ci.nsIToolkitProfileService
+  );
+
+  let createdProfile = {};
+  let didCreate = profileSvc.selectStartupProfile(
+    ["xpcshell"],
+    false,
+    AppConstants.UPDATE_CHANNEL,
+    "",
+    {},
+    {},
+    createdProfile
+  );
+  Assert.ok(didCreate, "Created a testing profile and set it to current.");
+  Assert.equal(
+    profileSvc.currentProfile,
+    createdProfile.value,
+    "Profile set to current"
+  );
+});
+
+/**
+ * A utility function for testing BackupService.createBackup. This helper
+ * function:
+ *
+ * 1. Ensures that `backup` will be called on BackupResources with the service
+ * 2. Ensures that a backup-manifest.json will be written and contain the
+ *    ManifestEntry data returned by each BackupResource.
+ * 3. Ensures that a `staging` folder will be written to and renamed properly
+ *    once the backup creation is complete.
+ *
+ * Once this is done, a task function can be run. The task function is passed
+ * the parsed backup-manifest.json object as its only argument.
+ *
+ * @param {object} sandbox
+ *   The Sinon sandbox to be used stubs and mocks. The test using this helper
+ *   is responsible for creating and resetting this sandbox.
+ * @param {Function} taskFn
+ *   A function that is run once all default checks are done on the manifest
+ *   and staging folder. After this function returns, the staging folder will
+ *   be cleaned up.
+ * @returns {Promise}
+ */
+async function testCreateBackupHelper(sandbox, taskFn) {
+  const EXPECTED_CLIENT_ID = await ClientID.getClientID();
+
+  let fake1ManifestEntry = { fake1: "hello from 1" };
+  sandbox
+    .stub(FakeBackupResource1.prototype, "backup")
+    .resolves(fake1ManifestEntry);
+
+  sandbox
+    .stub(FakeBackupResource2.prototype, "backup")
+    .rejects(new Error("Some failure to backup"));
+
+  let fake3ManifestEntry = { fake3: "hello from 3" };
+  sandbox
+    .stub(FakeBackupResource3.prototype, "backup")
+    .resolves(fake3ManifestEntry);
+
+  let bs = new BackupService({
+    FakeBackupResource1,
+    FakeBackupResource2,
+    FakeBackupResource3,
+  });
+
+  let fakeProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "createBackupTest"
+  );
+
+  await bs.createBackup({ profilePath: fakeProfilePath });
+
+  // We expect the staging folder to exist then be renamed under the fakeProfilePath.
+  // We should also find a folder for each fake BackupResource.
+  let backupsFolderPath = PathUtils.join(fakeProfilePath, "backups");
+  let stagingPath = PathUtils.join(backupsFolderPath, "staging");
+
+  // For now, we expect a single backup only to be saved.
+  let backups = await IOUtils.getChildren(backupsFolderPath);
+  Assert.equal(
+    backups.length,
+    1,
+    "There should only be 1 backup in the backups folder"
+  );
+
+  let renamedFilename = await PathUtils.filename(backups[0]);
+  let expectedFormatRegex = /^\d{4}(-\d{2}){2}T(\d{2}-){2}\d{2}Z$/;
+  Assert.ok(
+    renamedFilename.match(expectedFormatRegex),
+    "Renamed staging folder should have format YYYY-MM-DDTHH-mm-ssZ"
+  );
+
+  let stagingPathRenamed = PathUtils.join(backupsFolderPath, renamedFilename);
+
+  for (let backupResourceClass of [
+    FakeBackupResource1,
+    FakeBackupResource2,
+    FakeBackupResource3,
+  ]) {
+    let expectedResourceFolderBeforeRename = PathUtils.join(
+      stagingPath,
+      backupResourceClass.key
+    );
+    let expectedResourceFolderAfterRename = PathUtils.join(
+      stagingPathRenamed,
+      backupResourceClass.key
+    );
+
+    Assert.ok(
+      await IOUtils.exists(expectedResourceFolderAfterRename),
+      `BackupResource folder exists for ${backupResourceClass.key} after rename`
+    );
+    Assert.ok(
+      backupResourceClass.prototype.backup.calledOnce,
+      `Backup was called for ${backupResourceClass.key}`
+    );
+    Assert.ok(
+      backupResourceClass.prototype.backup.calledWith(
+        expectedResourceFolderBeforeRename,
+        fakeProfilePath
+      ),
+      `Backup was called in the staging folder for ${backupResourceClass.key} before rename`
+    );
+  }
+
+  // Check that resources were called from highest to lowest backup priority.
+  sinon.assert.callOrder(
+    FakeBackupResource3.prototype.backup,
+    FakeBackupResource2.prototype.backup,
+    FakeBackupResource1.prototype.backup
+  );
+
+  let manifestPath = PathUtils.join(
+    stagingPathRenamed,
+    BackupService.MANIFEST_FILE_NAME
+  );
+
+  Assert.ok(await IOUtils.exists(manifestPath), "Manifest file exists");
+  let manifest = await IOUtils.readJSON(manifestPath);
+
+  let schema = await BackupService.MANIFEST_SCHEMA;
+  let validationResult = JsonSchemaValidator.validate(manifest, schema);
+  Assert.ok(validationResult.valid, "Schema matches manifest");
+  Assert.deepEqual(
+    Object.keys(manifest.resources).sort(),
+    ["fake1", "fake3"],
+    "Manifest contains all expected BackupResource keys"
+  );
+  Assert.deepEqual(
+    manifest.resources.fake1,
+    fake1ManifestEntry,
+    "Manifest contains the expected entry for FakeBackupResource1"
+  );
+  Assert.deepEqual(
+    manifest.resources.fake3,
+    fake3ManifestEntry,
+    "Manifest contains the expected entry for FakeBackupResource3"
+  );
+  Assert.equal(
+    manifest.meta.legacyClientID,
+    EXPECTED_CLIENT_ID,
+    "The client ID was stored properly."
+  );
+
+  taskFn(manifest);
+
+  // After createBackup is more fleshed out, we're going to want to make sure
+  // that we're writing the manifest file and that it contains the expected
+  // ManifestEntry objects, and that the staging folder was successfully
+  // renamed with the current date.
+  await IOUtils.remove(fakeProfilePath, { recursive: true });
+}
+
+/**
+ * Tests that calling BackupService.createBackup will call backup on each
+ * registered BackupResource, and that each BackupResource will have a folder
+ * created for them to write into. Tests in the signed-out state.
+ */
+add_task(async function test_createBackup_signed_out() {
+  let sandbox = sinon.createSandbox();
+
+  sandbox
+    .stub(UIState, "get")
+    .returns({ status: UIState.STATUS_NOT_CONFIGURED });
+  await testCreateBackupHelper(sandbox, manifest => {
+    Assert.equal(
+      manifest.meta.accountID,
+      undefined,
+      "Account ID should be undefined."
+    );
+    Assert.equal(
+      manifest.meta.accountEmail,
+      undefined,
+      "Account email should be undefined."
+    );
+  });
+
+  sandbox.restore();
+});
+
+/**
+ * Tests that calling BackupService.createBackup will call backup on each
+ * registered BackupResource, and that each BackupResource will have a folder
+ * created for them to write into. Tests in the signed-in state.
+ */
+add_task(async function test_createBackup_signed_in() {
+  let sandbox = sinon.createSandbox();
+
+  const TEST_UID = "ThisIsMyTestUID";
+  const TEST_EMAIL = "foxy@mozilla.org";
+
+  sandbox.stub(UIState, "get").returns({
+    status: UIState.STATUS_SIGNED_IN,
+    uid: TEST_UID,
+    email: TEST_EMAIL,
+  });
+
+  await testCreateBackupHelper(sandbox, manifest => {
+    Assert.equal(
+      manifest.meta.accountID,
+      TEST_UID,
+      "Account ID should be set properly."
+    );
+    Assert.equal(
+      manifest.meta.accountEmail,
+      TEST_EMAIL,
+      "Account email should be set properly."
+    );
+  });
+
+  sandbox.restore();
+});
+
+/**
+ * Creates a directory that looks a lot like a decompressed backup archive,
+ * and then tests that BackupService.recoverFromBackup can create a new profile
+ * and recover into it.
+ */
+add_task(async function test_recoverFromBackup() {
+  let sandbox = sinon.createSandbox();
+  let fakeEntryMap = new Map();
+  let backupResourceClasses = [
+    FakeBackupResource1,
+    FakeBackupResource2,
+    FakeBackupResource3,
+  ];
+
+  let i = 1;
+  for (let backupResourceClass of backupResourceClasses) {
+    let fakeManifestEntry = { [`fake${i}`]: `hello from backup - ${i}` };
+    sandbox
+      .stub(backupResourceClass.prototype, "backup")
+      .resolves(fakeManifestEntry);
+
+    let fakePostRecoveryEntry = { [`fake${i}`]: `hello from recover - ${i}` };
+    sandbox
+      .stub(backupResourceClass.prototype, "recover")
+      .resolves(fakePostRecoveryEntry);
+
+    fakeEntryMap.set(backupResourceClass, {
+      manifestEntry: fakeManifestEntry,
+      postRecoveryEntry: fakePostRecoveryEntry,
+    });
+
+    ++i;
+  }
+
+  let bs = new BackupService({
+    FakeBackupResource1,
+    FakeBackupResource2,
+    FakeBackupResource3,
+  });
+
+  let oldProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "recoverFromBackupTest"
+  );
+  let newProfileRootPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "recoverFromBackupTest-newProfileRoot"
+  );
+
+  let { stagingPath } = await bs.createBackup({ profilePath: oldProfilePath });
+
+  let testTelemetryStateObject = {
+    clientID: "ed209123-04a1-04a1-04a1-c0ffeec0ffee",
+  };
+  await IOUtils.writeJSON(
+    PathUtils.join(PathUtils.profileDir, "datareporting", "state.json"),
+    testTelemetryStateObject
+  );
+
+  let profile = await bs.recoverFromBackup(
+    stagingPath,
+    false /* shouldLaunch */,
+    newProfileRootPath
+  );
+  Assert.ok(profile, "An nsIToolkitProfile was created.");
+  let newProfilePath = profile.rootDir.path;
+
+  let postRecoveryFilePath = PathUtils.join(
+    newProfilePath,
+    "post-recovery.json"
+  );
+  let postRecovery = await IOUtils.readJSON(postRecoveryFilePath);
+
+  for (let backupResourceClass of backupResourceClasses) {
+    let expectedResourceFolder = PathUtils.join(
+      stagingPath,
+      backupResourceClass.key
+    );
+
+    let { manifestEntry, postRecoveryEntry } =
+      fakeEntryMap.get(backupResourceClass);
+
+    Assert.ok(
+      backupResourceClass.prototype.recover.calledOnce,
+      `Recover was called for ${backupResourceClass.key}`
+    );
+    Assert.ok(
+      backupResourceClass.prototype.recover.calledWith(
+        manifestEntry,
+        expectedResourceFolder,
+        newProfilePath
+      ),
+      `Recover was passed the right arguments for ${backupResourceClass.key}`
+    );
+    Assert.deepEqual(
+      postRecoveryEntry,
+      postRecovery[backupResourceClass.key],
+      "The post recovery data is as expected"
+    );
+  }
+
+  let newProfileTelemetryStateObject = await IOUtils.readJSON(
+    PathUtils.join(newProfileRootPath, "datareporting", "state.json")
+  );
+  Assert.deepEqual(
+    testTelemetryStateObject,
+    newProfileTelemetryStateObject,
+    "Recovered profile inherited telemetry state from the profile that " +
+      "initiated recovery"
+  );
+
+  await IOUtils.remove(oldProfilePath, { recursive: true });
+  await IOUtils.remove(newProfileRootPath, { recursive: true });
+  sandbox.restore();
+});
+
+/**
+ * Tests that if there's a post-recovery.json file in the profile directory
+ * when checkForPostRecovery() is called, that it is processed, and the
+ * postRecovery methods on the associated BackupResources are called with the
+ * entry values from the file.
+ */
+add_task(async function test_checkForPostRecovery() {
+  let sandbox = sinon.createSandbox();
+
+  let testProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "checkForPostRecoveryTest"
+  );
+  let fakePostRecoveryObject = {
+    [FakeBackupResource1.key]: "test 1",
+    [FakeBackupResource3.key]: "test 3",
+  };
+  await IOUtils.writeJSON(
+    PathUtils.join(testProfilePath, BackupService.POST_RECOVERY_FILE_NAME),
+    fakePostRecoveryObject
+  );
+
+  sandbox.stub(FakeBackupResource1.prototype, "postRecovery").resolves();
+  sandbox.stub(FakeBackupResource2.prototype, "postRecovery").resolves();
+  sandbox.stub(FakeBackupResource3.prototype, "postRecovery").resolves();
+
+  let bs = new BackupService({
+    FakeBackupResource1,
+    FakeBackupResource2,
+    FakeBackupResource3,
+  });
+
+  await bs.checkForPostRecovery(testProfilePath);
+  await bs.postRecoveryComplete;
+
+  Assert.ok(
+    FakeBackupResource1.prototype.postRecovery.calledOnce,
+    "FakeBackupResource1.postRecovery was called once"
+  );
+  Assert.ok(
+    FakeBackupResource2.prototype.postRecovery.notCalled,
+    "FakeBackupResource2.postRecovery was not called"
+  );
+  Assert.ok(
+    FakeBackupResource3.prototype.postRecovery.calledOnce,
+    "FakeBackupResource3.postRecovery was called once"
+  );
+  Assert.ok(
+    FakeBackupResource1.prototype.postRecovery.calledWith(
+      fakePostRecoveryObject[FakeBackupResource1.key]
+    ),
+    "FakeBackupResource1.postRecovery was called with the expected argument"
+  );
+  Assert.ok(
+    FakeBackupResource3.prototype.postRecovery.calledWith(
+      fakePostRecoveryObject[FakeBackupResource3.key]
+    ),
+    "FakeBackupResource3.postRecovery was called with the expected argument"
+  );
+
+  await IOUtils.remove(testProfilePath, { recursive: true });
+  sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js
new file mode 100644
index 0000000000..c73482dfe6
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js
@@ -0,0 +1,59 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+add_setup(() => {
+  // FOG needs to be initialized in order for data to flow.
+  Services.fog.initializeFOG();
+  Services.telemetry.clearScalars();
+});
+
+/**
+ * Tests that calling `BackupService.takeMeasurements` will call the measure
+ * method of all registered BackupResource classes.
+ */
+add_task(async function test_takeMeasurements() {
+  let sandbox = sinon.createSandbox();
+  sandbox.stub(FakeBackupResource1.prototype, "measure").resolves();
+  sandbox
+    .stub(FakeBackupResource2.prototype, "measure")
+    .rejects(new Error("Some failure to measure"));
+
+  let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 });
+  await bs.takeMeasurements();
+
+  for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) {
+    Assert.ok(
+      backupResourceClass.prototype.measure.calledOnce,
+      "Measure was called"
+    );
+    Assert.ok(
+      backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir),
+      "Measure was called with the profile directory argument"
+    );
+  }
+
+  sandbox.restore();
+});
+
+/**
+ * Tests that we can measure the disk space available in the profile directory.
+ */
+add_task(async function test_profDDiskSpace() {
+  let bs = new BackupService();
+  await bs.takeMeasurements();
+  let measurement = Glean.browserBackup.profDDiskSpace.testGetValue();
+  TelemetryTestUtils.assertScalar(
+    TelemetryTestUtils.getProcessScalars("parent", false, true),
+    "browser.backup.prof_d_disk_space",
+    measurement
+  );
+
+  Assert.greater(
+    measurement,
+    0,
+    "Should have collected a measurement for the profile directory storage " +
+      "device"
+  );
+});
diff --git a/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js
new file mode 100644
index 0000000000..1690580437
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js
@@ -0,0 +1,142 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { CookiesBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/CookiesBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Cookies db in a profile directory.
+ */
+add_task(async function test_measure() {
+  const EXPECTED_COOKIES_DB_SIZE = 1230;
+
+  Services.fog.testResetFOG();
+
+  // Create resource files in temporary directory
+  let tempDir = PathUtils.tempDir;
+  let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite");
+  await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE);
+
+  let cookiesBackupResource = new CookiesBackupResource();
+  await cookiesBackupResource.measure(tempDir);
+
+  let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue();
+  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+  // Compare glean vs telemetry measurements
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.cookies_size",
+    cookiesMeasurement,
+    "Glean and telemetry measurements for cookies.sqlite should be equal"
+  );
+
+  // Compare glean measurements vs actual file sizes
+  Assert.equal(
+    cookiesMeasurement,
+    EXPECTED_COOKIES_DB_SIZE,
+    "Should have collected the correct glean measurement for cookies.sqlite"
+  );
+
+  await maybeRemovePath(tempCookiesDBPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+  let sandbox = sinon.createSandbox();
+
+  let cookiesBackupResource = new CookiesBackupResource();
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CookiesBackupResource-source-test"
+  );
+  let stagingPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CookiesBackupResource-staging-test"
+  );
+
+  // Make sure this file exists in the source directory, otherwise
+  // BackupResource will skip attempting to back it up.
+  await createTestFiles(sourcePath, [{ path: "cookies.sqlite" }]);
+
+  // We have no need to test that Sqlite.sys.mjs's backup method is working -
+  // this is something that is tested in Sqlite's own tests. We can just make
+  // sure that it's being called using sinon. Unfortunately, we cannot do the
+  // same thing with IOUtils.copy, as its methods are not stubbable.
+  let fakeConnection = {
+    backup: sandbox.stub().resolves(true),
+    close: sandbox.stub().resolves(true),
+  };
+  sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+  let manifestEntry = await cookiesBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "CookiesBackupResource.backup should return null as its ManifestEntry"
+  );
+
+  // Next, we'll make sure that the Sqlite connection had `backup` called on it
+  // with the right arguments.
+  Assert.ok(
+    fakeConnection.backup.calledOnce,
+    "Called backup the expected number of times for all connections"
+  );
+  Assert.ok(
+    fakeConnection.backup.calledWith(
+      PathUtils.join(stagingPath, "cookies.sqlite")
+    ),
+    "Called backup on the cookies.sqlite Sqlite connection"
+  );
+
+  await maybeRemovePath(stagingPath);
+  await maybeRemovePath(sourcePath);
+
+  sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let cookiesBackupResource = new CookiesBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CookiesBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CookiesBackupResource-test-profile"
+  );
+
+  const simpleCopyFiles = [{ path: "cookies.sqlite" }];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await cookiesBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "CookiesBackupResource.recover should return null as its post " +
+      "recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js
new file mode 100644
index 0000000000..f53fec8d3f
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js
@@ -0,0 +1,215 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure credentials related files in the profile directory.
+ */
+add_task(async function test_measure() {
+  Services.fog.testResetFOG();
+
+  const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413;
+  const EXPECTED_SECURITY_KILOBYTES_SIZE = 231;
+
+  // Create resource files in temporary directory
+  const tempDir = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CredentialsAndSecurityBackupResource-measurement-test"
+  );
+
+  const mockFiles = [
+    // Set up credentials files
+    { path: "key4.db", sizeInKB: 300 },
+    { path: "logins.json", sizeInKB: 1 },
+    { path: "logins-backup.json", sizeInKB: 1 },
+    { path: "autofill-profiles.json", sizeInKB: 1 },
+    { path: "credentialstate.sqlite", sizeInKB: 100 },
+    { path: "signedInUser.json", sizeInKB: 5 },
+    // Set up security files
+    { path: "cert9.db", sizeInKB: 230 },
+    { path: "pkcs11.txt", sizeInKB: 1 },
+  ];
+
+  await createTestFiles(tempDir, mockFiles);
+
+  let credentialsAndSecurityBackupResource =
+    new CredentialsAndSecurityBackupResource();
+  await credentialsAndSecurityBackupResource.measure(tempDir);
+
+  let credentialsMeasurement =
+    Glean.browserBackup.credentialsDataSize.testGetValue();
+  let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue();
+  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+  // Credentials measurements
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.credentials_data_size",
+    credentialsMeasurement,
+    "Glean and telemetry measurements for credentials data should be equal"
+  );
+
+  Assert.equal(
+    credentialsMeasurement,
+    EXPECTED_CREDENTIALS_KILOBYTES_SIZE,
+    "Should have collected the correct glean measurement for credentials files"
+  );
+
+  // Security measurements
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.security_data_size",
+    securityMeasurement,
+    "Glean and telemetry measurements for security data should be equal"
+  );
+  Assert.equal(
+    securityMeasurement,
+    EXPECTED_SECURITY_KILOBYTES_SIZE,
+    "Should have collected the correct glean measurement for security files"
+  );
+
+  // Cleanup
+  await maybeRemovePath(tempDir);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+  let sandbox = sinon.createSandbox();
+
+  let credentialsAndSecurityBackupResource =
+    new CredentialsAndSecurityBackupResource();
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CredentialsAndSecurityBackupResource-source-test"
+  );
+  let stagingPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CredentialsAndSecurityBackupResource-staging-test"
+  );
+
+  const simpleCopyFiles = [
+    { path: "logins.json", sizeInKB: 1 },
+    { path: "logins-backup.json", sizeInKB: 1 },
+    { path: "autofill-profiles.json", sizeInKB: 1 },
+    { path: "signedInUser.json", sizeInKB: 5 },
+    { path: "pkcs11.txt", sizeInKB: 1 },
+  ];
+  await createTestFiles(sourcePath, simpleCopyFiles);
+
+  // Create our fake database files. We don't expect these to be copied to the
+  // staging directory in this test due to our stubbing of the backup method, so
+  // we don't include it in `simpleCopyFiles`.
+  await createTestFiles(sourcePath, [
+    { path: "cert9.db" },
+    { path: "key4.db" },
+    { path: "credentialstate.sqlite" },
+  ]);
+
+  // We have no need to test that Sqlite.sys.mjs's backup method is working -
+  // this is something that is tested in Sqlite's own tests. We can just make
+  // sure that it's being called using sinon. Unfortunately, we cannot do the
+  // same thing with IOUtils.copy, as its methods are not stubbable.
+  let fakeConnection = {
+    backup: sandbox.stub().resolves(true),
+    close: sandbox.stub().resolves(true),
+  };
+  sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+  let manifestEntry = await credentialsAndSecurityBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+
+  Assert.equal(
+    manifestEntry,
+    null,
+    "CredentialsAndSecurityBackupResource.backup should return null as its ManifestEntry"
+  );
+
+  await assertFilesExist(stagingPath, simpleCopyFiles);
+
+  // Next, we'll make sure that the Sqlite connection had `backup` called on it
+  // with the right arguments.
+  Assert.ok(
+    fakeConnection.backup.calledThrice,
+    "Called backup the expected number of times for all connections"
+  );
+  Assert.ok(
+    fakeConnection.backup.firstCall.calledWith(
+      PathUtils.join(stagingPath, "cert9.db")
+    ),
+    "Called backup on cert9.db connection first"
+  );
+  Assert.ok(
+    fakeConnection.backup.secondCall.calledWith(
+      PathUtils.join(stagingPath, "key4.db")
+    ),
+    "Called backup on key4.db connection second"
+  );
+  Assert.ok(
+    fakeConnection.backup.thirdCall.calledWith(
+      PathUtils.join(stagingPath, "credentialstate.sqlite")
+    ),
+    "Called backup on credentialstate.sqlite connection third"
+  );
+
+  await maybeRemovePath(stagingPath);
+  await maybeRemovePath(sourcePath);
+
+  sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let credentialsAndSecurityBackupResource =
+    new CredentialsAndSecurityBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CredentialsAndSecurityBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "CredentialsAndSecurityBackupResource-test-profile"
+  );
+
+  const files = [
+    { path: "logins.json" },
+    { path: "logins-backup.json" },
+    { path: "autofill-profiles.json" },
+    { path: "credentialstate.sqlite" },
+    { path: "signedInUser.json" },
+    { path: "cert9.db" },
+    { path: "key4.db" },
+    { path: "pkcs11.txt" },
+  ];
+  await createTestFiles(recoveryPath, files);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await credentialsAndSecurityBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "CredentialsAndSecurityBackupResource.recover should return null as its post " +
+      "recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, files);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js
new file mode 100644
index 0000000000..93434daa9c
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js
@@ -0,0 +1,146 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { FormHistoryBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/FormHistoryBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Form History db in a profile directory.
+ */
+add_task(async function test_measure() {
+  const EXPECTED_FORM_HISTORY_DB_SIZE = 500;
+
+  Services.fog.testResetFOG();
+
+  // Create resource files in temporary directory
+  let tempDir = PathUtils.tempDir;
+  let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite");
+  await createKilobyteSizedFile(
+    tempFormHistoryDBPath,
+    EXPECTED_FORM_HISTORY_DB_SIZE
+  );
+
+  let formHistoryBackupResource = new FormHistoryBackupResource();
+  await formHistoryBackupResource.measure(tempDir);
+
+  let formHistoryMeasurement =
+    Glean.browserBackup.formHistorySize.testGetValue();
+  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+  // Compare glean vs telemetry measurements
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.form_history_size",
+    formHistoryMeasurement,
+    "Glean and telemetry measurements for formhistory.sqlite should be equal"
+  );
+
+  // Compare glean measurements vs actual file sizes
+  Assert.equal(
+    formHistoryMeasurement,
+    EXPECTED_FORM_HISTORY_DB_SIZE,
+    "Should have collected the correct glean measurement for formhistory.sqlite"
+  );
+
+  await IOUtils.remove(tempFormHistoryDBPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+  let sandbox = sinon.createSandbox();
+
+  let formHistoryBackupResource = new FormHistoryBackupResource();
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "FormHistoryBackupResource-source-test"
+  );
+  let stagingPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "FormHistoryBackupResource-staging-test"
+  );
+
+  // Make sure this file exists in the source directory, otherwise
+  // BackupResource will skip attempting to back it up.
+  await createTestFiles(sourcePath, [{ path: "formhistory.sqlite" }]);
+
+  // We have no need to test that Sqlite.sys.mjs's backup method is working -
+  // this is something that is tested in Sqlite's own tests. We can just make
+  // sure that it's being called using sinon. Unfortunately, we cannot do the
+  // same thing with IOUtils.copy, as its methods are not stubbable.
+  let fakeConnection = {
+    backup: sandbox.stub().resolves(true),
+    close: sandbox.stub().resolves(true),
+  };
+  sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+  let manifestEntry = await formHistoryBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "FormHistoryBackupResource.backup should return null as its ManifestEntry"
+  );
+
+  // Next, we'll make sure that the Sqlite connection had `backup` called on it
+  // with the right arguments.
+  Assert.ok(
+    fakeConnection.backup.calledOnce,
+    "Called backup the expected number of times for all connections"
+  );
+  Assert.ok(
+    fakeConnection.backup.calledWith(
+      PathUtils.join(stagingPath, "formhistory.sqlite")
+    ),
+    "Called backup on the formhistory.sqlite Sqlite connection"
+  );
+
+  await maybeRemovePath(stagingPath);
+  await maybeRemovePath(sourcePath);
+
+  sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let formHistoryBackupResource = new FormHistoryBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "FormHistoryBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "FormHistoryBackupResource-test-profile"
+  );
+
+  const simpleCopyFiles = [{ path: "formhistory.sqlite" }];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await formHistoryBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "FormHistoryBackupResource.recover should return null as its post " +
+      "recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
index e57dd50cd3..ab63b65332 100644
--- a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
@@ -7,20 +7,27 @@ const { MiscDataBackupResource } = ChromeUtils.importESModule(
   "resource:///modules/backup/MiscDataBackupResource.sys.mjs"
 );
 
+const { ActivityStreamStorage } = ChromeUtils.importESModule(
+  "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs"
+);
+
+const { ProfileAge } = ChromeUtils.importESModule(
+  "resource://gre/modules/ProfileAge.sys.mjs"
+);
+
 /**
  * Tests that we can measure miscellaneous files in the profile directory.
  */
 add_task(async function test_measure() {
   Services.fog.testResetFOG();
 
-  const EXPECTED_MISC_KILOBYTES_SIZE = 241;
+  const EXPECTED_MISC_KILOBYTES_SIZE = 231;
   const tempDir = await IOUtils.createUniqueDirectory(
     PathUtils.tempDir,
     "MiscDataBackupResource-measurement-test"
   );
 
   const mockFiles = [
-    { path: "times.json", sizeInKB: 5 },
     { path: "enumerate_devices.txt", sizeInKB: 1 },
     { path: "protections.sqlite", sizeInKB: 100 },
     { path: "SiteSecurityServiceState.bin", sizeInKB: 10 },
@@ -69,12 +76,16 @@ add_task(async function test_backup() {
   );
 
   const simpleCopyFiles = [
-    { path: "times.json" },
     { path: "enumerate_devices.txt" },
     { path: "SiteSecurityServiceState.bin" },
   ];
   await createTestFiles(sourcePath, simpleCopyFiles);
 
+  // Create our fake database file. We don't expect it to be copied to the
+  // staging directory in this test due to our stubbing of the backup method, so
+  // we don't include it in `simpleCopyFiles`.
+  await createTestFiles(sourcePath, [{ path: "protections.sqlite" }]);
+
   // We have no need to test that Sqlite.sys.mjs's backup method is working -
   // this is something that is tested in Sqlite's own tests. We can just make
   // sure that it's being called using sinon. Unfortunately, we cannot do the
@@ -85,7 +96,27 @@ add_task(async function test_backup() {
   };
   sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
 
-  await miscDataBackupResource.backup(stagingPath, sourcePath);
+  let snippetsTableStub = {
+    getAllKeys: sandbox.stub().resolves(["key1", "key2"]),
+    get: sandbox.stub().callsFake(key => {
+      return { key: `value for ${key}` };
+    }),
+  };
+
+  sandbox
+    .stub(ActivityStreamStorage.prototype, "getDbTable")
+    .withArgs("snippets")
+    .resolves(snippetsTableStub);
+
+  let manifestEntry = await miscDataBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "MiscDataBackupResource.backup should return null as its ManifestEntry"
+  );
 
   await assertFilesExist(stagingPath, simpleCopyFiles);
 
@@ -102,12 +133,170 @@ add_task(async function test_backup() {
     "Called backup on the protections.sqlite Sqlite connection"
   );
 
-  // Bug 1890585 - we don't currently have the ability to copy the
-  // chrome-privileged IndexedDB databases under storage/permanent/chrome, so
-  // we'll just skip testing that for now.
+  // Bug 1890585 - we don't currently have the generalized ability to copy the
+  // chrome-privileged IndexedDB databases under storage/permanent/chrome, but
+  // we do support copying individual IndexedDB databases by manually exporting
+  // and re-importing their contents.
+  let snippetsBackupPath = PathUtils.join(
+    stagingPath,
+    "activity-stream-snippets.json"
+  );
+  Assert.ok(
+    await IOUtils.exists(snippetsBackupPath),
+    "The activity-stream-snippets.json file should exist"
+  );
+  let snippetsBackupContents = await IOUtils.readJSON(snippetsBackupPath);
+  Assert.deepEqual(
+    snippetsBackupContents,
+    {
+      key1: { key: "value for key1" },
+      key2: { key: "value for key2" },
+    },
+    "The contents of the activity-stream-snippets.json file should be as expected"
+  );
 
   await maybeRemovePath(stagingPath);
   await maybeRemovePath(sourcePath);
 
   sandbox.restore();
 });
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let miscBackupResource = new MiscDataBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "MiscDataBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "MiscDataBackupResource-test-profile"
+  );
+
+  // Write a dummy times.json into the xpcshell test profile directory. We
+  // expect it to be copied into the destination profile.
+  let originalProfileAge = await ProfileAge(PathUtils.profileDir);
+  await originalProfileAge.computeAndPersistCreated();
+  Assert.ok(
+    await IOUtils.exists(PathUtils.join(PathUtils.profileDir, "times.json"))
+  );
+
+  const simpleCopyFiles = [
+    { path: "enumerate_devices.txt" },
+    { path: "protections.sqlite" },
+    { path: "SiteSecurityServiceState.bin" },
+  ];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  const SNIPPETS_BACKUP_FILE = "activity-stream-snippets.json";
+
+  // We'll also separately create the activity-stream-snippets.json file, which
+  // is not expected to be copied into the profile directory, but is expected
+  // to exist in the recovery path.
+  await createTestFiles(recoveryPath, [{ path: SNIPPETS_BACKUP_FILE }]);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await miscBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.deepEqual(
+    postRecoveryEntry,
+    {
+      snippetsBackupFile: PathUtils.join(recoveryPath, SNIPPETS_BACKUP_FILE),
+    },
+    "MiscDataBackupResource.recover should return the snippets backup data " +
+      "path as its post recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+  // The activity-stream-snippets.json path should _not_ have been written to
+  // the profile path.
+  Assert.ok(
+    !(await IOUtils.exists(
+      PathUtils.join(destProfilePath, SNIPPETS_BACKUP_FILE)
+    )),
+    "Snippets backup data should not have gone into the profile directory"
+  );
+
+  // The times.json file should have been copied over and a backup recovery
+  // time written into it.
+  Assert.ok(
+    await IOUtils.exists(PathUtils.join(destProfilePath, "times.json"))
+  );
+  let copiedProfileAge = await ProfileAge(destProfilePath);
+  Assert.equal(
+    await originalProfileAge.created,
+    await copiedProfileAge.created,
+    "Created timestamp should match."
+  );
+  Assert.equal(
+    await originalProfileAge.firstUse,
+    await copiedProfileAge.firstUse,
+    "First use timestamp should match."
+  );
+  Assert.ok(
+    await copiedProfileAge.recoveredFromBackup,
+    "Backup recovery timestamp should have been set."
+  );
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
+
+/**
+ * Test that the postRecovery method correctly writes the snippets backup data
+ * into the snippets IndexedDB table.
+ */
+add_task(async function test_postRecovery() {
+  let sandbox = sinon.createSandbox();
+
+  let fakeProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "MiscDataBackupResource-test-profile"
+  );
+  let fakeSnippetsData = {
+    key1: "value1",
+    key2: "value2",
+  };
+  const SNIPPETS_BACKUP_FILE = PathUtils.join(
+    fakeProfilePath,
+    "activity-stream-snippets.json"
+  );
+
+  await IOUtils.writeJSON(SNIPPETS_BACKUP_FILE, fakeSnippetsData);
+
+  let snippetsTableStub = {
+    set: sandbox.stub(),
+  };
+
+  sandbox
+    .stub(ActivityStreamStorage.prototype, "getDbTable")
+    .withArgs("snippets")
+    .resolves(snippetsTableStub);
+
+  let miscBackupResource = new MiscDataBackupResource();
+  await miscBackupResource.postRecovery({
+    snippetsBackupFile: SNIPPETS_BACKUP_FILE,
+  });
+
+  Assert.ok(
+    snippetsTableStub.set.calledTwice,
+    "The snippets table's set method was called twice"
+  );
+  Assert.ok(
+    snippetsTableStub.set.firstCall.calledWith("key1", "value1"),
+    "The snippets table's set method was called with the first key-value pair"
+  );
+  Assert.ok(
+    snippetsTableStub.set.secondCall.calledWith("key2", "value2"),
+    "The snippets table's set method was called with the second key-value pair"
+  );
+
+  sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
index de97281372..7248a5c614 100644
--- a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
@@ -3,6 +3,9 @@ https://creativecommons.org/publicdomain/zero/1.0/ */
 
 "use strict";
 
+const { BookmarkJSONUtils } = ChromeUtils.importESModule(
+  "resource://gre/modules/BookmarkJSONUtils.sys.mjs"
+);
 const { PlacesBackupResource } = ChromeUtils.importESModule(
   "resource:///modules/backup/PlacesBackupResource.sys.mjs"
 );
@@ -93,13 +96,28 @@ add_task(async function test_backup() {
     "PlacesBackupResource-staging-test"
   );
 
+  // Make sure these files exist in the source directory, otherwise
+  // BackupResource will skip attempting to back them up.
+  await createTestFiles(sourcePath, [
+    { path: "places.sqlite" },
+    { path: "favicons.sqlite" },
+  ]);
+
   let fakeConnection = {
     backup: sandbox.stub().resolves(true),
     close: sandbox.stub().resolves(true),
   };
   sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
 
-  await placesBackupResource.backup(stagingPath, sourcePath);
+  let manifestEntry = await placesBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "PlacesBackupResource.backup should return null as its ManifestEntry"
+  );
 
   Assert.ok(
     fakeConnection.backup.calledTwice,
@@ -154,7 +172,16 @@ add_task(async function test_backup_no_saved_history() {
   Services.prefs.setBoolPref(HISTORY_ENABLED_PREF, false);
   Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, false);
 
-  await placesBackupResource.backup(stagingPath, sourcePath);
+  let manifestEntry = await placesBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.deepEqual(
+    manifestEntry,
+    { bookmarksOnly: true },
+    "Should have gotten back a ManifestEntry indicating that we only copied " +
+      "bookmarks"
+  );
 
   Assert.ok(
     fakeConnection.backup.notCalled,
@@ -171,7 +198,13 @@ add_task(async function test_backup_no_saved_history() {
   Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, true);
 
   fakeConnection.backup.resetHistory();
-  await placesBackupResource.backup(stagingPath, sourcePath);
+  manifestEntry = await placesBackupResource.backup(stagingPath, sourcePath);
+  Assert.deepEqual(
+    manifestEntry,
+    { bookmarksOnly: true },
+    "Should have gotten back a ManifestEntry indicating that we only copied " +
+      "bookmarks"
+  );
 
   Assert.ok(
     fakeConnection.backup.notCalled,
@@ -211,7 +244,16 @@ add_task(async function test_backup_private_browsing() {
   sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
   sandbox.stub(PrivateBrowsingUtils, "permanentPrivateBrowsing").value(true);
 
-  await placesBackupResource.backup(stagingPath, sourcePath);
+  let manifestEntry = await placesBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.deepEqual(
+    manifestEntry,
+    { bookmarksOnly: true },
+    "Should have gotten back a ManifestEntry indicating that we only copied " +
+      "bookmarks"
+  );
 
   Assert.ok(
     fakeConnection.backup.notCalled,
@@ -224,3 +266,104 @@ add_task(async function test_backup_private_browsing() {
 
   sandbox.restore();
 });
+
+/**
+ * Test that the recover method correctly copies places.sqlite and favicons.sqlite
+ * from the recovery directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let placesBackupResource = new PlacesBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PlacesBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PlacesBackupResource-test-profile"
+  );
+
+  const simpleCopyFiles = [
+    { path: "places.sqlite" },
+    { path: "favicons.sqlite" },
+  ];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await placesBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "PlacesBackupResource.recover should return null as its post recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
+
+/**
+ * Test that the recover method correctly copies bookmarks.jsonlz4 from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover_bookmarks_only() {
+  let sandbox = sinon.createSandbox();
+  let placesBackupResource = new PlacesBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PlacesBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PlacesBackupResource-test-profile"
+  );
+  let bookmarksImportStub = sandbox
+    .stub(BookmarkJSONUtils, "importFromFile")
+    .resolves(true);
+
+  await createTestFiles(recoveryPath, [{ path: "bookmarks.jsonlz4" }]);
+
+  // The backup method is expected to detect bookmarks import only
+  let postRecoveryEntry = await placesBackupResource.recover(
+    { bookmarksOnly: true },
+    recoveryPath,
+    destProfilePath
+  );
+
+  let expectedBookmarksPath = PathUtils.join(recoveryPath, "bookmarks.jsonlz4");
+
+  // Expect the bookmarks backup file path to be passed from recover()
+  Assert.deepEqual(
+    postRecoveryEntry,
+    { bookmarksBackupPath: expectedBookmarksPath },
+    "PlacesBackupResource.recover should return the expected post recovery entry"
+  );
+
+  // Ensure that files stored in a places backup are not copied to the new profile during recovery
+  for (let placesFile of [
+    "places.sqlite",
+    "favicons.sqlite",
+    "bookmarks.jsonlz4",
+  ]) {
+    Assert.ok(
+      !(await IOUtils.exists(PathUtils.join(destProfilePath, placesFile))),
+      `${placesFile} should not exist in the new profile`
+    );
+  }
+
+  // Now pretend that BackupService called the postRecovery method
+  await placesBackupResource.postRecovery(postRecoveryEntry);
+  Assert.ok(
+    bookmarksImportStub.calledOnce,
+    "BookmarkJSONUtils.importFromFile was called in the postRecovery step"
+  );
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+
+  sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
index 6845431bb8..2075b57e91 100644
--- a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
@@ -86,6 +86,14 @@ add_task(async function test_backup() {
   ];
   await createTestFiles(sourcePath, simpleCopyFiles);
 
+  // Create our fake database files. We don't expect these to be copied to the
+  // staging directory in this test due to our stubbing of the backup method, so
+  // we don't include them in `simpleCopyFiles`.
+  await createTestFiles(sourcePath, [
+    { path: "permissions.sqlite" },
+    { path: "content-prefs.sqlite" },
+  ]);
+
   // We have no need to test that Sqlite.sys.mjs's backup method is working -
   // this is something that is tested in Sqlite's own tests. We can just make
   // sure that it's being called using sinon. Unfortunately, we cannot do the
@@ -96,7 +104,15 @@ add_task(async function test_backup() {
   };
   sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
 
-  await preferencesBackupResource.backup(stagingPath, sourcePath);
+  let manifestEntry = await preferencesBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "PreferencesBackupResource.backup should return null as its ManifestEntry"
+  );
 
   await assertFilesExist(stagingPath, simpleCopyFiles);
 
@@ -130,3 +146,51 @@ add_task(async function test_backup() {
 
   sandbox.restore();
 });
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let preferencesBackupResource = new PreferencesBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PreferencesBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "PreferencesBackupResource-test-profile"
+  );
+
+  const simpleCopyFiles = [
+    { path: "prefs.js" },
+    { path: "xulstore.json" },
+    { path: "permissions.sqlite" },
+    { path: "content-prefs.sqlite" },
+    { path: "containers.json" },
+    { path: "handlers.json" },
+    { path: "search.json.mozlz4" },
+    { path: "user.js" },
+    { path: ["chrome", "userChrome.css"] },
+    { path: ["chrome", "userContent.css"] },
+    { path: ["chrome", "childFolder", "someOtherStylesheet.css"] },
+  ];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await preferencesBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "PreferencesBackupResource.recover should return null as its post recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js
new file mode 100644
index 0000000000..d57f2d3a25
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js
@@ -0,0 +1,209 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { SessionStoreBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/SessionStoreBackupResource.sys.mjs"
+);
+const { SessionStore } = ChromeUtils.importESModule(
+  "resource:///modules/sessionstore/SessionStore.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Session Store JSON and backups directory.
+ */
+add_task(async function test_measure() {
+  const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000;
+  Services.fog.testResetFOG();
+
+  // Create the sessionstore-backups directory.
+  let tempDir = PathUtils.tempDir;
+  let sessionStoreBackupsPath = PathUtils.join(
+    tempDir,
+    "sessionstore-backups",
+    "restore.jsonlz4"
+  );
+  await createKilobyteSizedFile(
+    sessionStoreBackupsPath,
+    EXPECTED_KILOBYTES_FOR_BACKUPS_DIR
+  );
+
+  let sessionStoreBackupResource = new SessionStoreBackupResource();
+  await sessionStoreBackupResource.measure(tempDir);
+
+  let sessionStoreBackupsDirectoryMeasurement =
+    Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue();
+  let sessionStoreMeasurement =
+    Glean.browserBackup.sessionStoreSize.testGetValue();
+  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+  // Compare glean vs telemetry measurements
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.session_store_backups_directory_size",
+    sessionStoreBackupsDirectoryMeasurement,
+    "Glean and telemetry measurements for session store backups directory should be equal"
+  );
+  TelemetryTestUtils.assertScalar(
+    scalars,
+    "browser.backup.session_store_size",
+    sessionStoreMeasurement,
+    "Glean and telemetry measurements for session store should be equal"
+  );
+
+  // Compare glean measurements vs actual file sizes
+  Assert.equal(
+    sessionStoreBackupsDirectoryMeasurement,
+    EXPECTED_KILOBYTES_FOR_BACKUPS_DIR,
+    "Should have collected the correct glean measurement for the sessionstore-backups directory"
+  );
+
+  // Session store measurement is from `getCurrentState`, so exact size is unknown.
+  Assert.greater(
+    sessionStoreMeasurement,
+    0,
+    "Should have collected a measurement for the session store"
+  );
+
+  await IOUtils.remove(sessionStoreBackupsPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+  let sandbox = sinon.createSandbox();
+
+  let sessionStoreBackupResource = new SessionStoreBackupResource();
+  let sourcePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "SessionStoreBackupResource-source-test"
+  );
+  let stagingPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "SessionStoreBackupResource-staging-test"
+  );
+
+  const simpleCopyFiles = [
+    { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] },
+    { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] },
+  ];
+  await createTestFiles(sourcePath, simpleCopyFiles);
+
+  let sessionStoreState = SessionStore.getCurrentState(true);
+  let manifestEntry = await sessionStoreBackupResource.backup(
+    stagingPath,
+    sourcePath
+  );
+  Assert.equal(
+    manifestEntry,
+    null,
+    "SessionStoreBackupResource.backup should return null as its ManifestEntry"
+  );
+
+  /**
+   * We don't expect the actual file sessionstore.jsonlz4 to exist in the profile directory before calling the backup method.
+   * Instead, verify that it is created by the backup method and exists in the staging folder right after.
+   */
+  await assertFilesExist(stagingPath, [
+    ...simpleCopyFiles,
+    { path: "sessionstore.jsonlz4" },
+  ]);
+
+  /**
+   * Do a deep comparison between the recorded session state before backup and the file made in the staging folder
+   * to verify that information about session state was correctly written for backup.
+   */
+  let sessionStoreStateStaged = await IOUtils.readJSON(
+    PathUtils.join(stagingPath, "sessionstore.jsonlz4"),
+    { decompress: true }
+  );
+
+  /**
+   * These timestamps might be slightly different from one another, so we'll exclude
+   * them from the comparison.
+   */
+  delete sessionStoreStateStaged.session.lastUpdate;
+  delete sessionStoreState.session.lastUpdate;
+  Assert.deepEqual(
+    sessionStoreStateStaged,
+    sessionStoreState,
+    "sessionstore.jsonlz4 in the staging folder matches the recorded session state"
+  );
+
+  await maybeRemovePath(stagingPath);
+  await maybeRemovePath(sourcePath);
+
+  sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+  let sessionStoreBackupResource = new SessionStoreBackupResource();
+  let recoveryPath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "SessionStoreBackupResource-recovery-test"
+  );
+  let destProfilePath = await IOUtils.createUniqueDirectory(
+    PathUtils.tempDir,
+    "SessionStoreBackupResource-test-profile"
+  );
+
+  const simpleCopyFiles = [
+    { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] },
+    { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] },
+  ];
+  await createTestFiles(recoveryPath, simpleCopyFiles);
+
+  // We back up a copy of sessionstore.jsonlz4, so ensure it exists in the recovery path
+  let sessionStoreState = SessionStore.getCurrentState(true);
+  let sessionStoreBackupPath = PathUtils.join(
+    recoveryPath,
+    "sessionstore.jsonlz4"
+  );
+  await IOUtils.writeJSON(sessionStoreBackupPath, sessionStoreState, {
+    compress: true,
+  });
+
+  // The backup method is expected to have returned a null ManifestEntry
+  let postRecoveryEntry = await sessionStoreBackupResource.recover(
+    null /* manifestEntry */,
+    recoveryPath,
+    destProfilePath
+  );
+  Assert.equal(
+    postRecoveryEntry,
+    null,
+    "SessionStoreBackupResource.recover should return null as its post recovery entry"
+  );
+
+  await assertFilesExist(destProfilePath, [
+    ...simpleCopyFiles,
+    { path: "sessionstore.jsonlz4" },
+  ]);
+
+  let sessionStateCopied = await IOUtils.readJSON(
+    PathUtils.join(destProfilePath, "sessionstore.jsonlz4"),
+    { decompress: true }
+  );
+
+  /**
+   * These timestamps might be slightly different from one another, so we'll exclude
+   * them from the comparison.
+   */
+  delete sessionStateCopied.session.lastUpdate;
+  delete sessionStoreState.session.lastUpdate;
+  Assert.deepEqual(
+    sessionStateCopied,
+    sessionStoreState,
+    "sessionstore.jsonlz4 in the destination profile folder matches the backed up session state"
+  );
+
+  await maybeRemovePath(recoveryPath);
+  await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_createBackup.js b/browser/components/backup/tests/xpcshell/test_createBackup.js
deleted file mode 100644
index fcace695ef..0000000000
--- a/browser/components/backup/tests/xpcshell/test_createBackup.js
+++ /dev/null
@@ -1,74 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
-https://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-/**
- * Tests that calling BackupService.createBackup will call backup on each
- * registered BackupResource, and that each BackupResource will have a folder
- * created for them to write into.
- */
-add_task(async function test_createBackup() {
-  let sandbox = sinon.createSandbox();
-  sandbox
-    .stub(FakeBackupResource1.prototype, "backup")
-    .resolves({ fake1: "hello from 1" });
-  sandbox
-    .stub(FakeBackupResource2.prototype, "backup")
-    .rejects(new Error("Some failure to backup"));
-  sandbox
-    .stub(FakeBackupResource3.prototype, "backup")
-    .resolves({ fake3: "hello from 3" });
-
-  let bs = new BackupService({
-    FakeBackupResource1,
-    FakeBackupResource2,
-    FakeBackupResource3,
-  });
-
-  let fakeProfilePath = await IOUtils.createUniqueDirectory(
-    PathUtils.tempDir,
-    "createBackupTest"
-  );
-
-  await bs.createBackup({ profilePath: fakeProfilePath });
-
-  // For now, we expect a staging folder to exist under the fakeProfilePath,
-  // and we should find a folder for each fake BackupResource.
-  let stagingPath = PathUtils.join(fakeProfilePath, "backups", "staging");
-  Assert.ok(await IOUtils.exists(stagingPath), "Staging folder exists");
-
-  for (let backupResourceClass of [
-    FakeBackupResource1,
-    FakeBackupResource2,
-    FakeBackupResource3,
-  ]) {
-    let expectedResourceFolder = PathUtils.join(
-      stagingPath,
-      backupResourceClass.key
-    );
-    Assert.ok(
-      await IOUtils.exists(expectedResourceFolder),
-      `BackupResource staging folder exists for ${backupResourceClass.key}`
-    );
-    Assert.ok(
-      backupResourceClass.prototype.backup.calledOnce,
-      `Backup was called for ${backupResourceClass.key}`
-    );
-    Assert.ok(
-      backupResourceClass.prototype.backup.calledWith(
-        expectedResourceFolder,
-        fakeProfilePath
-      ),
-      `Backup was passed the right paths for ${backupResourceClass.key}`
-    );
-  }
-
-  // After createBackup is more fleshed out, we're going to want to make sure
-  // that we're writing the manifest file and that it contains the expected
-  // ManifestEntry objects, and that the staging folder was successfully
-  // renamed with the current date.
-  await IOUtils.remove(fakeProfilePath, { recursive: true });
-
-  sandbox.restore();
-});
diff --git a/browser/components/backup/tests/xpcshell/test_measurements.js b/browser/components/backup/tests/xpcshell/test_measurements.js
deleted file mode 100644
index 0dece6b370..0000000000
--- a/browser/components/backup/tests/xpcshell/test_measurements.js
+++ /dev/null
@@ -1,577 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
-http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule(
-  "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs"
-);
-const { AddonsBackupResource } = ChromeUtils.importESModule(
-  "resource:///modules/backup/AddonsBackupResource.sys.mjs"
-);
-const { CookiesBackupResource } = ChromeUtils.importESModule(
-  "resource:///modules/backup/CookiesBackupResource.sys.mjs"
-);
-
-const { FormHistoryBackupResource } = ChromeUtils.importESModule(
-  "resource:///modules/backup/FormHistoryBackupResource.sys.mjs"
-);
-
-const { SessionStoreBackupResource } = ChromeUtils.importESModule(
-  "resource:///modules/backup/SessionStoreBackupResource.sys.mjs"
-);
-
-add_setup(() => {
-  // FOG needs to be initialized in order for data to flow.
-  Services.fog.initializeFOG();
-  Services.telemetry.clearScalars();
-});
-
-/**
- * Tests that calling `BackupService.takeMeasurements` will call the measure
- * method of all registered BackupResource classes.
- */
-add_task(async function test_takeMeasurements() {
-  let sandbox = sinon.createSandbox();
-  sandbox.stub(FakeBackupResource1.prototype, "measure").resolves();
-  sandbox
-    .stub(FakeBackupResource2.prototype, "measure")
-    .rejects(new Error("Some failure to measure"));
-
-  let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 });
-  await bs.takeMeasurements();
-
-  for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) {
-    Assert.ok(
-      backupResourceClass.prototype.measure.calledOnce,
-      "Measure was called"
-    );
-    Assert.ok(
-      backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir),
-      "Measure was called with the profile directory argument"
-    );
-  }
-
-  sandbox.restore();
-});
-
-/**
- * Tests that we can measure the disk space available in the profile directory.
- */
-add_task(async function test_profDDiskSpace() {
-  let bs = new BackupService();
-  await bs.takeMeasurements();
-  let measurement = Glean.browserBackup.profDDiskSpace.testGetValue();
-  TelemetryTestUtils.assertScalar(
-    TelemetryTestUtils.getProcessScalars("parent", false, true),
-    "browser.backup.prof_d_disk_space",
-    measurement
-  );
-
-  Assert.greater(
-    measurement,
-    0,
-    "Should have collected a measurement for the profile directory storage " +
-      "device"
-  );
-});
-
-/**
- * Tests that we can measure credentials related files in the profile directory.
- */
-add_task(async function test_credentialsAndSecurityBackupResource() {
-  Services.fog.testResetFOG();
-
-  const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413;
-  const EXPECTED_SECURITY_KILOBYTES_SIZE = 231;
-
-  // Create resource files in temporary directory
-  const tempDir = await IOUtils.createUniqueDirectory(
-    PathUtils.tempDir,
-    "CredentialsAndSecurityBackupResource-measurement-test"
-  );
-
-  const mockFiles = [
-    // Set up credentials files
-    { path: "key4.db", sizeInKB: 300 },
-    { path: "logins.json", sizeInKB: 1 },
-    { path: "logins-backup.json", sizeInKB: 1 },
-    { path: "autofill-profiles.json", sizeInKB: 1 },
-    { path: "credentialstate.sqlite", sizeInKB: 100 },
-    { path: "signedInUser.json", sizeInKB: 5 },
-    // Set up security files
-    { path: "cert9.db", sizeInKB: 230 },
-    { path: "pkcs11.txt", sizeInKB: 1 },
-  ];
-
-  await createTestFiles(tempDir, mockFiles);
-
-  let credentialsAndSecurityBackupResource =
-    new CredentialsAndSecurityBackupResource();
-  await credentialsAndSecurityBackupResource.measure(tempDir);
-
-  let credentialsMeasurement =
-    Glean.browserBackup.credentialsDataSize.testGetValue();
-  let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue();
-  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
-  // Credentials measurements
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.credentials_data_size",
-    credentialsMeasurement,
-    "Glean and telemetry measurements for credentials data should be equal"
-  );
-
-  Assert.equal(
-    credentialsMeasurement,
-    EXPECTED_CREDENTIALS_KILOBYTES_SIZE,
-    "Should have collected the correct glean measurement for credentials files"
-  );
-
-  // Security measurements
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.security_data_size",
-    securityMeasurement,
-    "Glean and telemetry measurements for security data should be equal"
-  );
-  Assert.equal(
-    securityMeasurement,
-    EXPECTED_SECURITY_KILOBYTES_SIZE,
-    "Should have collected the correct glean measurement for security files"
-  );
-
-  // Cleanup
-  await maybeRemovePath(tempDir);
-});
-
-/**
- * Tests that we can measure the Cookies db in a profile directory.
- */
-add_task(async function test_cookiesBackupResource() {
-  const EXPECTED_COOKIES_DB_SIZE = 1230;
-
-  Services.fog.testResetFOG();
-
-  // Create resource files in temporary directory
-  let tempDir = PathUtils.tempDir;
-  let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite");
-  await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE);
-
-  let cookiesBackupResource = new CookiesBackupResource();
-  await cookiesBackupResource.measure(tempDir);
-
-  let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue();
-  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
-  // Compare glean vs telemetry measurements
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.cookies_size",
-    cookiesMeasurement,
-    "Glean and telemetry measurements for cookies.sqlite should be equal"
-  );
-
-  // Compare glean measurements vs actual file sizes
-  Assert.equal(
-    cookiesMeasurement,
-    EXPECTED_COOKIES_DB_SIZE,
-    "Should have collected the correct glean measurement for cookies.sqlite"
-  );
-
-  await maybeRemovePath(tempCookiesDBPath);
-});
-
-/**
- * Tests that we can measure the Form History db in a profile directory.
- */
-add_task(async function test_formHistoryBackupResource() {
-  const EXPECTED_FORM_HISTORY_DB_SIZE = 500;
-
-  Services.fog.testResetFOG();
-
-  // Create resource files in temporary directory
-  let tempDir = PathUtils.tempDir;
-  let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite");
-  await createKilobyteSizedFile(
-    tempFormHistoryDBPath,
-    EXPECTED_FORM_HISTORY_DB_SIZE
-  );
-
-  let formHistoryBackupResource = new FormHistoryBackupResource();
-  await formHistoryBackupResource.measure(tempDir);
-
-  let formHistoryMeasurement =
-    Glean.browserBackup.formHistorySize.testGetValue();
-  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
-  // Compare glean vs telemetry measurements
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.form_history_size",
-    formHistoryMeasurement,
-    "Glean and telemetry measurements for formhistory.sqlite should be equal"
-  );
-
-  // Compare glean measurements vs actual file sizes
-  Assert.equal(
-    formHistoryMeasurement,
-    EXPECTED_FORM_HISTORY_DB_SIZE,
-    "Should have collected the correct glean measurement for formhistory.sqlite"
-  );
-
-  await IOUtils.remove(tempFormHistoryDBPath);
-});
-
-/**
- * Tests that we can measure the Session Store JSON and backups directory.
- */
-add_task(async function test_sessionStoreBackupResource() {
-  const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000;
-  Services.fog.testResetFOG();
-
-  // Create the sessionstore-backups directory.
-  let tempDir = PathUtils.tempDir;
-  let sessionStoreBackupsPath = PathUtils.join(
-    tempDir,
-    "sessionstore-backups",
-    "restore.jsonlz4"
-  );
-  await createKilobyteSizedFile(
-    sessionStoreBackupsPath,
-    EXPECTED_KILOBYTES_FOR_BACKUPS_DIR
-  );
-
-  let sessionStoreBackupResource = new SessionStoreBackupResource();
-  await sessionStoreBackupResource.measure(tempDir);
-
-  let sessionStoreBackupsDirectoryMeasurement =
-    Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue();
-  let sessionStoreMeasurement =
-    Glean.browserBackup.sessionStoreSize.testGetValue();
-  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
-  // Compare glean vs telemetry measurements
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.session_store_backups_directory_size",
-    sessionStoreBackupsDirectoryMeasurement,
-    "Glean and telemetry measurements for session store backups directory should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.session_store_size",
-    sessionStoreMeasurement,
-    "Glean and telemetry measurements for session store should be equal"
-  );
-
-  // Compare glean measurements vs actual file sizes
-  Assert.equal(
-    sessionStoreBackupsDirectoryMeasurement,
-    EXPECTED_KILOBYTES_FOR_BACKUPS_DIR,
-    "Should have collected the correct glean measurement for the sessionstore-backups directory"
-  );
-
-  // Session store measurement is from `getCurrentState`, so exact size is unknown.
-  Assert.greater(
-    sessionStoreMeasurement,
-    0,
-    "Should have collected a measurement for the session store"
-  );
-
-  await IOUtils.remove(sessionStoreBackupsPath);
-});
-
-/**
- * Tests that we can measure the size of all the addons & extensions data.
- */
-add_task(async function test_AddonsBackupResource() {
-  Services.fog.testResetFOG();
-  Services.telemetry.clearScalars();
-
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500;
-  const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000;
-  const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100;
-  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200;
-
-  let tempDir = PathUtils.tempDir;
-
-  // Create extensions json files (all the same size).
-  const extensionsFilePath = PathUtils.join(tempDir, "extensions.json");
-  await createKilobyteSizedFile(
-    extensionsFilePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
-  );
-  const extensionSettingsFilePath = PathUtils.join(
-    tempDir,
-    "extension-settings.json"
-  );
-  await createKilobyteSizedFile(
-    extensionSettingsFilePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
-  );
-  const extensionsPrefsFilePath = PathUtils.join(
-    tempDir,
-    "extension-preferences.json"
-  );
-  await createKilobyteSizedFile(
-    extensionsPrefsFilePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
-  );
-  const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4");
-  await createKilobyteSizedFile(
-    addonStartupFilePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
-  );
-
-  // Create the extension store permissions data file.
-  let extensionStorePermissionsDataSize = PathUtils.join(
-    tempDir,
-    "extension-store-permissions",
-    "data.safe.bin"
-  );
-  await createKilobyteSizedFile(
-    extensionStorePermissionsDataSize,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE
-  );
-
-  // Create the storage sync database file.
-  let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite");
-  await createKilobyteSizedFile(
-    storageSyncPath,
-    EXPECTED_KILOBYTES_FOR_STORAGE_SYNC
-  );
-
-  // Create the extensions directory with XPI files.
-  let extensionsXpiAPath = PathUtils.join(
-    tempDir,
-    "extensions",
-    "extension-b.xpi"
-  );
-  let extensionsXpiBPath = PathUtils.join(
-    tempDir,
-    "extensions",
-    "extension-a.xpi"
-  );
-  await createKilobyteSizedFile(
-    extensionsXpiAPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A
-  );
-  await createKilobyteSizedFile(
-    extensionsXpiBPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B
-  );
-  // Should be ignored.
-  let extensionsXpiStagedPath = PathUtils.join(
-    tempDir,
-    "extensions",
-    "staged",
-    "staged-test-extension.xpi"
-  );
-  let extensionsXpiTrashPath = PathUtils.join(
-    tempDir,
-    "extensions",
-    "trash",
-    "trashed-test-extension.xpi"
-  );
-  let extensionsXpiUnpackedPath = PathUtils.join(
-    tempDir,
-    "extensions",
-    "unpacked-extension.xpi",
-    "manifest.json"
-  );
-  await createKilobyteSizedFile(
-    extensionsXpiStagedPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
-  );
-  await createKilobyteSizedFile(
-    extensionsXpiTrashPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
-  );
-  await createKilobyteSizedFile(
-    extensionsXpiUnpackedPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
-  );
-
-  // Create the browser extension data directory.
-  let browserExtensionDataPath = PathUtils.join(
-    tempDir,
-    "browser-extension-data",
-    "test-file"
-  );
-  await createKilobyteSizedFile(
-    browserExtensionDataPath,
-    EXPECTED_KILOBYTES_FOR_EXTENSION_DATA
-  );
-
-  // Create the extensions storage directory.
-  let extensionsStoragePath = PathUtils.join(
-    tempDir,
-    "storage",
-    "default",
-    "moz-extension+++test-extension-id",
-    "idb",
-    "data.sqlite"
-  );
-  // Other storage files that should not be counted.
-  let otherStoragePath = PathUtils.join(
-    tempDir,
-    "storage",
-    "default",
-    "https+++accounts.firefox.com",
-    "ls",
-    "data.sqlite"
-  );
-
-  await createKilobyteSizedFile(
-    extensionsStoragePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
-  );
-  await createKilobyteSizedFile(
-    otherStoragePath,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
-  );
-
-  // Measure all the extensions data.
-  let extensionsBackupResource = new AddonsBackupResource();
-  await extensionsBackupResource.measure(tempDir);
-
-  let extensionsJsonSizeMeasurement =
-    Glean.browserBackup.extensionsJsonSize.testGetValue();
-  Assert.equal(
-    extensionsJsonSizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files.
-    "Should have collected the correct measurement of the total size of all extensions JSON files"
-  );
-
-  let extensionStorePermissionsDataSizeMeasurement =
-    Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
-  Assert.equal(
-    extensionStorePermissionsDataSizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE,
-    "Should have collected the correct measurement of the size of the extension store permissions data"
-  );
-
-  let storageSyncSizeMeasurement =
-    Glean.browserBackup.storageSyncSize.testGetValue();
-  Assert.equal(
-    storageSyncSizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_STORAGE_SYNC,
-    "Should have collected the correct measurement of the size of the storage sync database"
-  );
-
-  let extensionsXpiDirectorySizeMeasurement =
-    Glean.browserBackup.extensionsXpiDirectorySize.testGetValue();
-  Assert.equal(
-    extensionsXpiDirectorySizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY,
-    "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory"
-  );
-
-  let browserExtensionDataSizeMeasurement =
-    Glean.browserBackup.browserExtensionDataSize.testGetValue();
-  Assert.equal(
-    browserExtensionDataSizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_EXTENSION_DATA,
-    "Should have collected the correct measurement of the size of the browser extension data directory"
-  );
-
-  let extensionsStorageSizeMeasurement =
-    Glean.browserBackup.extensionsStorageSize.testGetValue();
-  Assert.equal(
-    extensionsStorageSizeMeasurement,
-    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE,
-    "Should have collected the correct measurement of all the extensions storage"
-  );
-
-  // Compare glean vs telemetry measurements
-  let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.extensions_json_size",
-    extensionsJsonSizeMeasurement,
-    "Glean and telemetry measurements for extensions JSON should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.extension_store_permissions_data_size",
-    extensionStorePermissionsDataSizeMeasurement,
-    "Glean and telemetry measurements for extension store permissions data should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.storage_sync_size",
-    storageSyncSizeMeasurement,
-    "Glean and telemetry measurements for storage sync database should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.extensions_xpi_directory_size",
-    extensionsXpiDirectorySizeMeasurement,
-    "Glean and telemetry measurements for extensions directory should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.browser_extension_data_size",
-    browserExtensionDataSizeMeasurement,
-    "Glean and telemetry measurements for browser extension data should be equal"
-  );
-  TelemetryTestUtils.assertScalar(
-    scalars,
-    "browser.backup.extensions_storage_size",
-    extensionsStorageSizeMeasurement,
-    "Glean and telemetry measurements for extensions storage should be equal"
-  );
-
-  await maybeRemovePath(tempDir);
-});
-
-/**
- * Tests that we can handle the extension store permissions data not existing.
- */
-add_task(
-  async function test_AddonsBackupResource_no_extension_store_permissions_data() {
-    Services.fog.testResetFOG();
-
-    let tempDir = PathUtils.tempDir;
-
-    let extensionsBackupResource = new AddonsBackupResource();
-    await extensionsBackupResource.measure(tempDir);
-
-    let extensionStorePermissionsDataSizeMeasurement =
-      Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
-    Assert.equal(
-      extensionStorePermissionsDataSizeMeasurement,
-      null,
-      "Should NOT have collected a measurement for the missing data"
-    );
-  }
-);
-
-/**
- * Tests that we can handle a profile with no moz-extension IndexedDB databases.
- */
-add_task(
-  async function test_AddonsBackupResource_no_extension_storage_databases() {
-    Services.fog.testResetFOG();
-
-    let tempDir = PathUtils.tempDir;
-
-    let extensionsBackupResource = new AddonsBackupResource();
-    await extensionsBackupResource.measure(tempDir);
-
-    let extensionsStorageSizeMeasurement =
-      Glean.browserBackup.extensionsStorageSize.testGetValue();
-    Assert.equal(
-      extensionsStorageSizeMeasurement,
-      null,
-      "Should NOT have collected a measurement for the missing data"
-    );
-  }
-);
diff --git a/browser/components/backup/tests/xpcshell/xpcshell.toml b/browser/components/backup/tests/xpcshell/xpcshell.toml
index 07e517f1f2..8a41c9e761 100644
--- a/browser/components/backup/tests/xpcshell/xpcshell.toml
+++ b/browser/components/backup/tests/xpcshell/xpcshell.toml
@@ -6,15 +6,25 @@ prefs = [
   "browser.backup.log=true",
 ]
 
+["test_AddonsBackupResource.js"]
+
 ["test_BackupResource.js"]
 support-files = ["data/test_xulstore.json"]
 
+["test_BackupService.js"]
+
+["test_BackupService_takeMeasurements.js"]
+
+["test_CookiesBackupResource.js"]
+
+["test_CredentialsAndSecurityBackupResource.js"]
+
+["test_FormHistoryBackupResource.js"]
+
 ["test_MiscDataBackupResource.js"]
 
 ["test_PlacesBackupResource.js"]
 
 ["test_PreferencesBackupResource.js"]
 
-["test_createBackup.js"]
-
-["test_measurements.js"]
+["test_SessionStoreBackupResource.js"]
-- 
cgit v1.2.3