summaryrefslogtreecommitdiffstats
path: root/browser/components/backup
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--browser/components/backup/BackupService.sys.mjs583
-rw-r--r--browser/components/backup/actors/BackupUIChild.sys.mjs54
-rw-r--r--browser/components/backup/actors/BackupUIParent.sys.mjs82
-rw-r--r--browser/components/backup/content/BackupManifest.1.schema.json82
-rw-r--r--browser/components/backup/content/backup-settings.mjs47
-rw-r--r--browser/components/backup/content/backup-settings.stories.mjs32
-rw-r--r--browser/components/backup/content/debug.html18
-rw-r--r--browser/components/backup/content/debug.js54
-rw-r--r--browser/components/backup/docs/backup-ui-actors.rst22
-rw-r--r--browser/components/backup/docs/index.rst1
-rw-r--r--browser/components/backup/jar.mn2
-rw-r--r--browser/components/backup/moz.build8
-rw-r--r--browser/components/backup/resources/AddonsBackupResource.sys.mjs75
-rw-r--r--browser/components/backup/resources/BackupResource.sys.mjs170
-rw-r--r--browser/components/backup/resources/CookiesBackupResource.sys.mjs14
-rw-r--r--browser/components/backup/resources/CredentialsAndSecurityBackupResource.sys.mjs36
-rw-r--r--browser/components/backup/resources/FormHistoryBackupResource.sys.mjs16
-rw-r--r--browser/components/backup/resources/MiscDataBackupResource.sys.mjs135
-rw-r--r--browser/components/backup/resources/PlacesBackupResource.sys.mjs69
-rw-r--r--browser/components/backup/resources/PreferencesBackupResource.sys.mjs52
-rw-r--r--browser/components/backup/resources/SessionStoreBackupResource.sys.mjs26
-rw-r--r--browser/components/backup/tests/browser/browser.toml7
-rw-r--r--browser/components/backup/tests/browser/browser_settings.js40
-rw-r--r--browser/components/backup/tests/chrome/chrome.toml4
-rw-r--r--browser/components/backup/tests/chrome/test_backup_settings.html43
-rw-r--r--browser/components/backup/tests/marionette/http2-ca.pem18
-rw-r--r--browser/components/backup/tests/marionette/manifest.toml6
-rw-r--r--browser/components/backup/tests/marionette/test_backup.py713
-rw-r--r--browser/components/backup/tests/xpcshell/data/test_xulstore.json1
-rw-r--r--browser/components/backup/tests/xpcshell/head.js6
-rw-r--r--browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js416
-rw-r--r--browser/components/backup/tests/xpcshell/test_BackupResource.js175
-rw-r--r--browser/components/backup/tests/xpcshell/test_BackupService.js451
-rw-r--r--browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js59
-rw-r--r--browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js142
-rw-r--r--browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js215
-rw-r--r--browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js146
-rw-r--r--browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js203
-rw-r--r--browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js151
-rw-r--r--browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js66
-rw-r--r--browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js209
-rw-r--r--browser/components/backup/tests/xpcshell/test_createBackup.js74
-rw-r--r--browser/components/backup/tests/xpcshell/test_measurements.js577
-rw-r--r--browser/components/backup/tests/xpcshell/xpcshell.toml16
44 files changed, 4547 insertions, 769 deletions
diff --git a/browser/components/backup/BackupService.sys.mjs b/browser/components/backup/BackupService.sys.mjs
index 3521f315fd..05634ed2c8 100644
--- a/browser/components/backup/BackupService.sys.mjs
+++ b/browser/components/backup/BackupService.sys.mjs
@@ -3,6 +3,7 @@
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
import * as DefaultBackupResources from "resource:///modules/backup/BackupResources.sys.mjs";
+import { AppConstants } from "resource://gre/modules/AppConstants.sys.mjs";
const lazy = {};
@@ -15,12 +16,25 @@ ChromeUtils.defineLazyGetter(lazy, "logConsole", function () {
});
});
+ChromeUtils.defineLazyGetter(lazy, "fxAccounts", () => {
+ return ChromeUtils.importESModule(
+ "resource://gre/modules/FxAccounts.sys.mjs"
+ ).getFxAccountsSingleton();
+});
+
+ChromeUtils.defineESModuleGetters(lazy, {
+ ClientID: "resource://gre/modules/ClientID.sys.mjs",
+ JsonSchemaValidator:
+ "resource://gre/modules/components-utils/JsonSchemaValidator.sys.mjs",
+ UIState: "resource://services-sync/UIState.sys.mjs",
+});
+
/**
* The BackupService class orchestrates the scheduling and creation of profile
* backups. It also does most of the heavy lifting for the restoration of a
* profile backup.
*/
-export class BackupService {
+export class BackupService extends EventTarget {
/**
* The BackupService singleton instance.
*
@@ -37,11 +51,138 @@ export class BackupService {
#resources = new Map();
/**
+ * Set to true if a backup is currently in progress. Causes stateUpdate()
+ * to be called.
+ *
+ * @see BackupService.stateUpdate()
+ * @param {boolean} val
+ * True if a backup is in progress.
+ */
+ set #backupInProgress(val) {
+ if (this.#_state.backupInProgress != val) {
+ this.#_state.backupInProgress = val;
+ this.stateUpdate();
+ }
+ }
+
+ /**
* True if a backup is currently in progress.
*
* @type {boolean}
*/
- #backupInProgress = false;
+ get #backupInProgress() {
+ return this.#_state.backupInProgress;
+ }
+
+ /**
+ * Dispatches an event to let listeners know that the BackupService state
+ * object has been updated.
+ */
+ stateUpdate() {
+ this.dispatchEvent(new CustomEvent("BackupService:StateUpdate"));
+ }
+
+ /**
+ * An object holding the current state of the BackupService instance, for
+ * the purposes of representing it in the user interface. Ideally, this would
+ * be named #state instead of #_state, but sphinx-js seems to be fairly
+ * unhappy with that coupled with the ``state`` getter.
+ *
+ * @type {object}
+ */
+ #_state = { backupInProgress: false };
+
+ /**
+ * A Promise that will resolve once the postRecovery steps are done. It will
+ * also resolve if postRecovery steps didn't need to run.
+ *
+ * @see BackupService.checkForPostRecovery()
+ * @type {Promise<undefined>}
+ */
+ #postRecoveryPromise;
+
+ /**
+ * The resolving function for #postRecoveryPromise, which should be called
+ * by checkForPostRecovery() before exiting.
+ *
+ * @type {Function}
+ */
+ #postRecoveryResolver;
+
+ /**
+ * The name of the backup manifest file.
+ *
+ * @type {string}
+ */
+ static get MANIFEST_FILE_NAME() {
+ return "backup-manifest.json";
+ }
+
+ /**
+ * The current schema version of the backup manifest that this BackupService
+ * uses when creating a backup.
+ *
+ * @type {number}
+ */
+ static get MANIFEST_SCHEMA_VERSION() {
+ return 1;
+ }
+
+ /**
+ * A promise that resolves to the schema for the backup manifest that this
+ * BackupService uses when creating a backup. This should be accessed via
+ * the `MANIFEST_SCHEMA` static getter.
+ *
+ * @type {Promise<object>}
+ */
+ static #manifestSchemaPromise = null;
+
+ /**
+ * The current schema version of the backup manifest that this BackupService
+ * uses when creating a backup.
+ *
+ * @type {Promise<object>}
+ */
+ static get MANIFEST_SCHEMA() {
+ if (!BackupService.#manifestSchemaPromise) {
+ BackupService.#manifestSchemaPromise = BackupService._getSchemaForVersion(
+ BackupService.MANIFEST_SCHEMA_VERSION
+ );
+ }
+
+ return BackupService.#manifestSchemaPromise;
+ }
+
+ /**
+ * The name of the post recovery file written into the newly created profile
+ * directory just after a profile is recovered from a backup.
+ *
+ * @type {string}
+ */
+ static get POST_RECOVERY_FILE_NAME() {
+ return "post-recovery.json";
+ }
+
+ /**
+ * Returns the schema for the backup manifest for a given version.
+ *
+ * This should really be #getSchemaForVersion, but for some reason,
+ * sphinx-js seems to choke on static async private methods (bug 1893362).
+ * We workaround this breakage by using the `_` prefix to indicate that this
+ * method should be _considered_ private, and ask that you not use this method
+ * outside of this class. The sphinx-js issue is tracked at
+ * https://github.com/mozilla/sphinx-js/issues/240.
+ *
+ * @private
+ * @param {number} version
+ * The version of the schema to return.
+ * @returns {Promise<object>}
+ */
+ static async _getSchemaForVersion(version) {
+ let schemaURL = `chrome://browser/content/backup/BackupManifest.${version}.schema.json`;
+ let response = await fetch(schemaURL);
+ return response.json();
+ }
/**
* Returns a reference to a BackupService singleton. If this is the first time
@@ -56,7 +197,10 @@ export class BackupService {
return this.#instance;
}
this.#instance = new BackupService(DefaultBackupResources);
- this.#instance.takeMeasurements();
+
+ this.#instance.checkForPostRecovery().then(() => {
+ this.#instance.takeMeasurements();
+ });
return this.#instance;
}
@@ -81,15 +225,49 @@ export class BackupService {
* @param {object} [backupResources=DefaultBackupResources] - Object containing BackupResource classes to associate with this service.
*/
constructor(backupResources = DefaultBackupResources) {
+ super();
lazy.logConsole.debug("Instantiated");
for (const resourceName in backupResources) {
let resource = backupResources[resourceName];
this.#resources.set(resource.key, resource);
}
+
+ let { promise, resolve } = Promise.withResolvers();
+ this.#postRecoveryPromise = promise;
+ this.#postRecoveryResolver = resolve;
+ }
+
+ /**
+ * Returns a reference to a Promise that will resolve with undefined once
+ * postRecovery steps have had a chance to run. This will also be resolved
+ * with undefined if no postRecovery steps needed to be run.
+ *
+ * @see BackupService.checkForPostRecovery()
+ * @returns {Promise<undefined>}
+ */
+ get postRecoveryComplete() {
+ return this.#postRecoveryPromise;
}
/**
+ * Returns a state object describing the state of the BackupService for the
+ * purposes of representing it in the user interface. The returned state
+ * object is immutable.
+ *
+ * @type {object}
+ */
+ get state() {
+ return Object.freeze(structuredClone(this.#_state));
+ }
+
+ /**
+ * @typedef {object} CreateBackupResult
+ * @property {string} stagingPath
+ * The staging path for where the backup was created.
+ */
+
+ /**
* Create a backup of the user's profile.
*
* @param {object} [options]
@@ -97,19 +275,22 @@ export class BackupService {
* @param {string} [options.profilePath=PathUtils.profileDir]
* The path to the profile to backup. By default, this is the current
* profile.
- * @returns {Promise<undefined>}
+ * @returns {Promise<CreateBackupResult|null>}
+ * A promise that resolves to an object containing the path to the staging
+ * folder where the backup was created, or null if the backup failed.
*/
async createBackup({ profilePath = PathUtils.profileDir } = {}) {
// createBackup does not allow re-entry or concurrent backups.
if (this.#backupInProgress) {
lazy.logConsole.warn("Backup attempt already in progress");
- return;
+ return null;
}
this.#backupInProgress = true;
try {
lazy.logConsole.debug(`Creating backup for profile at ${profilePath}`);
+ let manifest = await this.#createBackupManifest();
// First, check to see if a `backups` directory already exists in the
// profile.
@@ -122,8 +303,15 @@ export class BackupService {
let stagingPath = await this.#prepareStagingFolder(backupDirPath);
+ // Sort resources by priority.
+ let sortedResources = Array.from(this.#resources.values()).sort(
+ (a, b) => {
+ return b.priority - a.priority;
+ }
+ );
+
// Perform the backup for each resource.
- for (let resourceClass of this.#resources.values()) {
+ for (let resourceClass of sortedResources) {
try {
lazy.logConsole.debug(
`Backing up resource with key ${resourceClass.key}. ` +
@@ -139,10 +327,19 @@ export class BackupService {
resourcePath,
profilePath
);
- lazy.logConsole.debug(
- `Backup of resource with key ${resourceClass.key} completed`,
- manifestEntry
- );
+
+ if (manifestEntry === undefined) {
+ lazy.logConsole.error(
+ `Backup of resource with key ${resourceClass.key} returned undefined
+ as its ManifestEntry instead of null or an object`
+ );
+ } else {
+ lazy.logConsole.debug(
+ `Backup of resource with key ${resourceClass.key} completed`,
+ manifestEntry
+ );
+ manifest.resources[resourceClass.key] = manifestEntry;
+ }
} catch (e) {
lazy.logConsole.error(
`Failed to backup resource: ${resourceClass.key}`,
@@ -150,6 +347,42 @@ export class BackupService {
);
}
}
+
+ // Ensure that the manifest abides by the current schema, and log
+ // an error if somehow it doesn't. We'll want to collect telemetry for
+ // this case to make sure it's not happening in the wild. We debated
+ // throwing an exception here too, but that's not meaningfully better
+ // than creating a backup that's not schema-compliant. At least in this
+ // case, a user so-inclined could theoretically repair the manifest
+ // to make it valid.
+ let manifestSchema = await BackupService.MANIFEST_SCHEMA;
+ let schemaValidationResult = lazy.JsonSchemaValidator.validate(
+ manifest,
+ manifestSchema
+ );
+ if (!schemaValidationResult.valid) {
+ lazy.logConsole.error(
+ "Backup manifest does not conform to schema:",
+ manifest,
+ manifestSchema,
+ schemaValidationResult
+ );
+ // TODO: Collect telemetry for this case. (bug 1891817)
+ }
+
+ // Write the manifest to the staging folder.
+ let manifestPath = PathUtils.join(
+ stagingPath,
+ BackupService.MANIFEST_FILE_NAME
+ );
+ await IOUtils.writeJSON(manifestPath, manifest);
+
+ let renamedStagingPath = await this.#finalizeStagingFolder(stagingPath);
+ lazy.logConsole.log(
+ "Wrote backup to staging directory at ",
+ renamedStagingPath
+ );
+ return { stagingPath: renamedStagingPath };
} finally {
this.#backupInProgress = false;
}
@@ -179,6 +412,336 @@ export class BackupService {
}
/**
+ * Renames the staging folder to an ISO 8601 date string with dashes replacing colons and fractional seconds stripped off.
+ * The ISO date string should be formatted from YYYY-MM-DDTHH:mm:ss.sssZ to YYYY-MM-DDTHH-mm-ssZ
+ *
+ * @param {string} stagingPath
+ * The path to the populated staging folder.
+ * @returns {Promise<string|null>}
+ * The path to the renamed staging folder, or null if the stagingPath was
+ * not pointing to a valid folder.
+ */
+ async #finalizeStagingFolder(stagingPath) {
+ if (!(await IOUtils.exists(stagingPath))) {
+ // If we somehow can't find the specified staging folder, cancel this step.
+ lazy.logConsole.error(
+ `Failed to finalize staging folder. Cannot find ${stagingPath}.`
+ );
+ return null;
+ }
+
+ try {
+ lazy.logConsole.debug("Finalizing and renaming staging folder");
+ let currentDateISO = new Date().toISOString();
+ // First strip the fractional seconds
+ let dateISOStripped = currentDateISO.replace(/\.\d+\Z$/, "Z");
+ // Now replace all colons with dashes
+ let dateISOFormatted = dateISOStripped.replaceAll(":", "-");
+
+ let stagingPathParent = PathUtils.parent(stagingPath);
+ let renamedBackupPath = PathUtils.join(
+ stagingPathParent,
+ dateISOFormatted
+ );
+ await IOUtils.move(stagingPath, renamedBackupPath);
+
+ let existingBackups = await IOUtils.getChildren(stagingPathParent);
+
+ /**
+ * Bug 1892532: for now, we only support a single backup file.
+ * If there are other pre-existing backup folders, delete them.
+ */
+ for (let existingBackupPath of existingBackups) {
+ if (existingBackupPath !== renamedBackupPath) {
+ await IOUtils.remove(existingBackupPath, {
+ recursive: true,
+ });
+ }
+ }
+ return renamedBackupPath;
+ } catch (e) {
+ lazy.logConsole.error(
+ `Something went wrong while finalizing the staging folder. ${e}`
+ );
+ throw e;
+ }
+ }
+
+ /**
+ * Creates and resolves with a backup manifest object with an empty resources
+ * property.
+ *
+ * @returns {Promise<object>}
+ */
+ async #createBackupManifest() {
+ let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+ Ci.nsIToolkitProfileService
+ );
+ let profileName;
+ if (!profileSvc.currentProfile) {
+ // We're probably running on a local build or in some special configuration.
+ // Let's pull in a profile name from the profile directory.
+ let profileFolder = PathUtils.split(PathUtils.profileDir).at(-1);
+ profileName = profileFolder.substring(profileFolder.indexOf(".") + 1);
+ } else {
+ profileName = profileSvc.currentProfile.name;
+ }
+
+ let meta = {
+ date: new Date().toISOString(),
+ appName: AppConstants.MOZ_APP_NAME,
+ appVersion: AppConstants.MOZ_APP_VERSION,
+ buildID: AppConstants.MOZ_BUILDID,
+ profileName,
+ machineName: lazy.fxAccounts.device.getLocalName(),
+ osName: Services.sysinfo.getProperty("name"),
+ osVersion: Services.sysinfo.getProperty("version"),
+ legacyClientID: await lazy.ClientID.getClientID(),
+ };
+
+ let fxaState = lazy.UIState.get();
+ if (fxaState.status == lazy.UIState.STATUS_SIGNED_IN) {
+ meta.accountID = fxaState.uid;
+ meta.accountEmail = fxaState.email;
+ }
+
+ return {
+ version: BackupService.MANIFEST_SCHEMA_VERSION,
+ meta,
+ resources: {},
+ };
+ }
+
+ /**
+ * Given a decompressed backup archive at recoveryPath, this method does the
+ * following:
+ *
+ * 1. Reads in the backup manifest from the archive and ensures that it is
+ * valid.
+ * 2. Creates a new named profile directory using the same name as the one
+ * found in the backup manifest, but with a different prefix.
+ * 3. Iterates over each resource in the manifest and calls the recover()
+ * method on each found BackupResource, passing in the associated
+ * ManifestEntry from the backup manifest, and collects any post-recovery
+ * data from those resources.
+ * 4. Writes a `post-recovery.json` file into the newly created profile
+ * directory.
+ * 5. Returns the name of the newly created profile directory.
+ *
+ * @param {string} recoveryPath
+ * The path to the decompressed backup archive on the file system.
+ * @param {boolean} [shouldLaunch=false]
+ * An optional argument that specifies whether an instance of the app
+ * should be launched with the newly recovered profile after recovery is
+ * complete.
+ * @param {string} [profileRootPath=null]
+ * An optional argument that specifies the root directory where the new
+ * profile directory should be created. If not provided, the default
+ * profile root directory will be used. This is primarily meant for
+ * testing.
+ * @returns {Promise<nsIToolkitProfile>}
+ * The nsIToolkitProfile that was created for the recovered profile.
+ * @throws {Exception}
+ * In the event that recovery somehow failed.
+ */
+ async recoverFromBackup(
+ recoveryPath,
+ shouldLaunch = false,
+ profileRootPath = null
+ ) {
+ lazy.logConsole.debug("Recovering from backup at ", recoveryPath);
+
+ try {
+ // Read in the backup manifest.
+ let manifestPath = PathUtils.join(
+ recoveryPath,
+ BackupService.MANIFEST_FILE_NAME
+ );
+ let manifest = await IOUtils.readJSON(manifestPath);
+ if (!manifest.version) {
+ throw new Error("Backup manifest version not found");
+ }
+
+ if (manifest.version > BackupService.MANIFEST_SCHEMA_VERSION) {
+ throw new Error(
+ "Cannot recover from a manifest newer than the current schema version"
+ );
+ }
+
+ // Make sure that it conforms to the schema.
+ let manifestSchema = await BackupService._getSchemaForVersion(
+ manifest.version
+ );
+ let schemaValidationResult = lazy.JsonSchemaValidator.validate(
+ manifest,
+ manifestSchema
+ );
+ if (!schemaValidationResult.valid) {
+ lazy.logConsole.error(
+ "Backup manifest does not conform to schema:",
+ manifest,
+ manifestSchema,
+ schemaValidationResult
+ );
+ // TODO: Collect telemetry for this case. (bug 1891817)
+ throw new Error("Cannot recover from an invalid backup manifest");
+ }
+
+ // In the future, if we ever bump the MANIFEST_SCHEMA_VERSION and need to
+ // do any special behaviours to interpret older schemas, this is where we
+ // can do that, and we can remove this comment.
+
+ let meta = manifest.meta;
+
+ // Okay, we have a valid backup-manifest.json. Let's create a new profile
+ // and start invoking the recover() method on each BackupResource.
+ let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+ Ci.nsIToolkitProfileService
+ );
+ let profile = profileSvc.createUniqueProfile(
+ profileRootPath ? await IOUtils.getDirectory(profileRootPath) : null,
+ meta.profileName
+ );
+
+ let postRecovery = {};
+
+ // Iterate over each resource in the manifest and call recover() on each
+ // associated BackupResource.
+ for (let resourceKey in manifest.resources) {
+ let manifestEntry = manifest.resources[resourceKey];
+ let resourceClass = this.#resources.get(resourceKey);
+ if (!resourceClass) {
+ lazy.logConsole.error(
+ `No BackupResource found for key ${resourceKey}`
+ );
+ continue;
+ }
+
+ try {
+ lazy.logConsole.debug(
+ `Restoring resource with key ${resourceKey}. ` +
+ `Requires encryption: ${resourceClass.requiresEncryption}`
+ );
+ let resourcePath = PathUtils.join(recoveryPath, resourceKey);
+ let postRecoveryEntry = await new resourceClass().recover(
+ manifestEntry,
+ resourcePath,
+ profile.rootDir.path
+ );
+ postRecovery[resourceKey] = postRecoveryEntry;
+ } catch (e) {
+ lazy.logConsole.error(
+ `Failed to recover resource: ${resourceKey}`,
+ e
+ );
+ }
+ }
+
+ // Make sure that a legacy telemetry client ID exists and is written to
+ // disk.
+ let clientID = await lazy.ClientID.getClientID();
+ lazy.logConsole.debug("Current client ID: ", clientID);
+ // Next, copy over the legacy telemetry client ID state from the currently
+ // running profile. The newly created profile that we're recovering into
+ // should inherit this client ID.
+ const TELEMETRY_STATE_FILENAME = "state.json";
+ const TELEMETRY_STATE_FOLDER = "datareporting";
+ await IOUtils.makeDirectory(
+ PathUtils.join(profile.rootDir.path, TELEMETRY_STATE_FOLDER)
+ );
+ await IOUtils.copy(
+ /* source */
+ PathUtils.join(
+ PathUtils.profileDir,
+ TELEMETRY_STATE_FOLDER,
+ TELEMETRY_STATE_FILENAME
+ ),
+ /* destination */
+ PathUtils.join(
+ profile.rootDir.path,
+ TELEMETRY_STATE_FOLDER,
+ TELEMETRY_STATE_FILENAME
+ )
+ );
+
+ let postRecoveryPath = PathUtils.join(
+ profile.rootDir.path,
+ BackupService.POST_RECOVERY_FILE_NAME
+ );
+ await IOUtils.writeJSON(postRecoveryPath, postRecovery);
+
+ profileSvc.flush();
+
+ if (shouldLaunch) {
+ Services.startup.createInstanceWithProfile(profile);
+ }
+
+ return profile;
+ } catch (e) {
+ lazy.logConsole.error(
+ "Failed to recover from backup at ",
+ recoveryPath,
+ e
+ );
+ throw e;
+ }
+ }
+
+ /**
+ * Checks for the POST_RECOVERY_FILE_NAME in the current profile directory.
+ * If one exists, instantiates any relevant BackupResource's, and calls
+ * postRecovery() on them with the appropriate entry from the file. Once
+ * this is done, deletes the file.
+ *
+ * The file is deleted even if one of the postRecovery() steps rejects or
+ * fails.
+ *
+ * This function resolves silently if the POST_RECOVERY_FILE_NAME file does
+ * not exist, which should be the majority of cases.
+ *
+ * @param {string} [profilePath=PathUtils.profileDir]
+ * The profile path to look for the POST_RECOVERY_FILE_NAME file. Defaults
+ * to the current profile.
+ * @returns {Promise<undefined>}
+ */
+ async checkForPostRecovery(profilePath = PathUtils.profileDir) {
+ lazy.logConsole.debug(`Checking for post-recovery file in ${profilePath}`);
+ let postRecoveryFile = PathUtils.join(
+ profilePath,
+ BackupService.POST_RECOVERY_FILE_NAME
+ );
+
+ if (!(await IOUtils.exists(postRecoveryFile))) {
+ lazy.logConsole.debug("Did not find post-recovery file.");
+ this.#postRecoveryResolver();
+ return;
+ }
+
+ lazy.logConsole.debug("Found post-recovery file. Loading...");
+
+ try {
+ let postRecovery = await IOUtils.readJSON(postRecoveryFile);
+ for (let resourceKey in postRecovery) {
+ let postRecoveryEntry = postRecovery[resourceKey];
+ let resourceClass = this.#resources.get(resourceKey);
+ if (!resourceClass) {
+ lazy.logConsole.error(
+ `Invalid resource for post-recovery step: ${resourceKey}`
+ );
+ continue;
+ }
+
+ lazy.logConsole.debug(`Running post-recovery step for ${resourceKey}`);
+ await new resourceClass().postRecovery(postRecoveryEntry);
+ lazy.logConsole.debug(`Done post-recovery step for ${resourceKey}`);
+ }
+ } finally {
+ await IOUtils.remove(postRecoveryFile, { ignoreAbsent: true });
+ this.#postRecoveryResolver();
+ }
+ }
+
+ /**
* Take measurements of the current profile state for Telemetry.
*
* @returns {Promise<undefined>}
diff --git a/browser/components/backup/actors/BackupUIChild.sys.mjs b/browser/components/backup/actors/BackupUIChild.sys.mjs
new file mode 100644
index 0000000000..25d013fa8e
--- /dev/null
+++ b/browser/components/backup/actors/BackupUIChild.sys.mjs
@@ -0,0 +1,54 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+/**
+ * A JSWindowActor that is responsible for marshalling information between
+ * the BackupService singleton and any registered UI widgets that need to
+ * represent data from that service. Any UI widgets that want to receive
+ * state updates from BackupService should emit a BackupUI:InitWidget
+ * event in a document that this actor pair is registered for.
+ */
+export class BackupUIChild extends JSWindowActorChild {
+ #inittedWidgets = new WeakSet();
+
+ /**
+ * Handles BackupUI:InitWidget custom events fired by widgets that want to
+ * register with BackupUIChild. Firing this event sends a message to the
+ * parent to request the BackupService state which will result in a
+ * `backupServiceState` property of the widget to be set when that state is
+ * received. Subsequent state updates will also cause that state property to
+ * be set.
+ *
+ * @param {Event} event
+ * The BackupUI:InitWidget custom event that the widget fired.
+ */
+ handleEvent(event) {
+ if (event.type == "BackupUI:InitWidget") {
+ this.#inittedWidgets.add(event.target);
+ this.sendAsyncMessage("RequestState");
+ }
+ }
+
+ /**
+ * Handles messages sent by BackupUIParent.
+ *
+ * @param {ReceiveMessageArgument} message
+ * The message received from the BackupUIParent.
+ */
+ receiveMessage(message) {
+ if (message.name == "StateUpdate") {
+ let widgets = ChromeUtils.nondeterministicGetWeakSetKeys(
+ this.#inittedWidgets
+ );
+ for (let widget of widgets) {
+ if (widget.isConnected) {
+ // Note: we might need to switch to using Cu.cloneInto here in the
+ // event that these widgets are embedded in a non-parent-process
+ // context, like in an onboarding card.
+ widget.backupServiceState = message.data.state;
+ }
+ }
+ }
+ }
+}
diff --git a/browser/components/backup/actors/BackupUIParent.sys.mjs b/browser/components/backup/actors/BackupUIParent.sys.mjs
new file mode 100644
index 0000000000..e4d0f3aace
--- /dev/null
+++ b/browser/components/backup/actors/BackupUIParent.sys.mjs
@@ -0,0 +1,82 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const lazy = {};
+
+ChromeUtils.defineESModuleGetters(lazy, {
+ BackupService: "resource:///modules/backup/BackupService.sys.mjs",
+});
+
+/**
+ * A JSWindowActor that is responsible for marshalling information between
+ * the BackupService singleton and any registered UI widgets that need to
+ * represent data from that service.
+ */
+export class BackupUIParent extends JSWindowActorParent {
+ /**
+ * A reference to the BackupService singleton instance.
+ *
+ * @type {BackupService}
+ */
+ #bs;
+
+ /**
+ * Create a BackupUIParent instance. If a BackupUIParent is instantiated
+ * before BrowserGlue has a chance to initialize the BackupService, this
+ * constructor will cause it to initialize first.
+ */
+ constructor() {
+ super();
+ // We use init() rather than get(), since it's possible to load
+ // about:preferences before the service has had a chance to init itself
+ // via BrowserGlue.
+ this.#bs = lazy.BackupService.init();
+ }
+
+ /**
+ * Called once the BackupUIParent/BackupUIChild pair have been connected.
+ */
+ actorCreated() {
+ this.#bs.addEventListener("BackupService:StateUpdate", this);
+ }
+
+ /**
+ * Called once the BackupUIParent/BackupUIChild pair have been disconnected.
+ */
+ didDestroy() {
+ this.#bs.removeEventListener("BackupService:StateUpdate", this);
+ }
+
+ /**
+ * Handles events fired by the BackupService.
+ *
+ * @param {Event} event
+ * The event that the BackupService emitted.
+ */
+ handleEvent(event) {
+ if (event.type == "BackupService:StateUpdate") {
+ this.sendState();
+ }
+ }
+
+ /**
+ * Handles messages sent by BackupUIChild.
+ *
+ * @param {ReceiveMessageArgument} message
+ * The message received from the BackupUIChild.
+ */
+ receiveMessage(message) {
+ if (message.name == "RequestState") {
+ this.sendState();
+ }
+ }
+
+ /**
+ * Sends the StateUpdate message to the BackupUIChild, along with the most
+ * recent state object from BackupService.
+ */
+ sendState() {
+ this.sendAsyncMessage("StateUpdate", { state: this.#bs.state });
+ }
+}
diff --git a/browser/components/backup/content/BackupManifest.1.schema.json b/browser/components/backup/content/BackupManifest.1.schema.json
new file mode 100644
index 0000000000..51418988fe
--- /dev/null
+++ b/browser/components/backup/content/BackupManifest.1.schema.json
@@ -0,0 +1,82 @@
+{
+ "$schema": "https://json-schema.org/draft/2019-09/schema",
+ "$id": "file:///BackupManifest.schema.json",
+ "title": "BackupManifest",
+ "description": "A schema for the backup-manifest.json file for profile backups created by the BackupService",
+ "type": "object",
+ "properties": {
+ "version": {
+ "type": "integer",
+ "description": "Version of the backup manifest structure"
+ },
+ "meta": {
+ "type": "object",
+ "description": "Metadata about the backup",
+ "properties": {
+ "date": {
+ "type": "string",
+ "format": "date-time",
+ "description": "Date and time that the backup was created"
+ },
+ "appName": {
+ "type": "string",
+ "description": "Name of the application that the backup was created for."
+ },
+ "appVersion": {
+ "type": "string",
+ "description": "Full version string for the app instance that the backup was created on"
+ },
+ "buildID": {
+ "type": "string",
+ "description": "Build ID for the app instance that the backup was created on"
+ },
+ "profileName": {
+ "type": "string",
+ "description": "The name given to the profile that was backed up"
+ },
+ "machineName": {
+ "type": "string",
+ "description": "The name of the machine that the backup was created on"
+ },
+ "osName": {
+ "type": "string",
+ "description": "The OS name that the backup was created on"
+ },
+ "osVersion": {
+ "type": "string",
+ "description": "The OS version that the backup was created on"
+ },
+ "accountID": {
+ "type": "string",
+ "description": "The ID for the account that the user profile was signed into when backing up. Optional."
+ },
+ "accountEmail": {
+ "type": "string",
+ "description": "The email address for the account that the user profile was signed into when backing up. Optional."
+ },
+ "legacyClientID": {
+ "type": "string",
+ "description": "The legacy telemetry client ID for the profile that the backup was created on."
+ }
+ },
+ "required": [
+ "date",
+ "appName",
+ "appVersion",
+ "buildID",
+ "profileName",
+ "machineName",
+ "osName",
+ "osVersion",
+ "legacyClientID"
+ ]
+ },
+ "resources": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "object"
+ }
+ }
+ },
+ "required": ["version", "resources", "meta"]
+}
diff --git a/browser/components/backup/content/backup-settings.mjs b/browser/components/backup/content/backup-settings.mjs
new file mode 100644
index 0000000000..c34d87dbc7
--- /dev/null
+++ b/browser/components/backup/content/backup-settings.mjs
@@ -0,0 +1,47 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+import { html } from "chrome://global/content/vendor/lit.all.mjs";
+import { MozLitElement } from "chrome://global/content/lit-utils.mjs";
+
+/**
+ * The widget for managing the BackupService that is embedded within the main
+ * document of about:settings / about:preferences.
+ */
+export default class BackupSettings extends MozLitElement {
+ static properties = {
+ backupServiceState: { type: Object },
+ };
+
+ /**
+ * Creates a BackupSettings instance and sets the initial default
+ * state.
+ */
+ constructor() {
+ super();
+ this.backupServiceState = {
+ backupInProgress: false,
+ };
+ }
+
+ /**
+ * Dispatches the BackupUI:InitWidget custom event upon being attached to the
+ * DOM, which registers with BackupUIChild for BackupService state updates.
+ */
+ connectedCallback() {
+ super.connectedCallback();
+ this.dispatchEvent(
+ new CustomEvent("BackupUI:InitWidget", { bubbles: true })
+ );
+ }
+
+ render() {
+ return html`<div>
+ Backup in progress:
+ ${this.backupServiceState.backupInProgress ? "Yes" : "No"}
+ </div>`;
+ }
+}
+
+customElements.define("backup-settings", BackupSettings);
diff --git a/browser/components/backup/content/backup-settings.stories.mjs b/browser/components/backup/content/backup-settings.stories.mjs
new file mode 100644
index 0000000000..2a87c361bc
--- /dev/null
+++ b/browser/components/backup/content/backup-settings.stories.mjs
@@ -0,0 +1,32 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+// eslint-disable-next-line import/no-unresolved
+import { html } from "lit.all.mjs";
+// eslint-disable-next-line import/no-unassigned-import
+import "./backup-settings.mjs";
+
+export default {
+ title: "Domain-specific UI Widgets/Backup/Backup Settings",
+ component: "backup-settings",
+ argTypes: {},
+};
+
+const Template = ({ backupServiceState }) => html`
+ <backup-settings .backupServiceState=${backupServiceState}></backup-settings>
+`;
+
+export const BackingUpNotInProgress = Template.bind({});
+BackingUpNotInProgress.args = {
+ backupServiceState: {
+ backupInProgress: false,
+ },
+};
+
+export const BackingUpInProgress = Template.bind({});
+BackingUpInProgress.args = {
+ backupServiceState: {
+ backupInProgress: true,
+ },
+};
diff --git a/browser/components/backup/content/debug.html b/browser/components/backup/content/debug.html
index 5d6517cf2a..55034d4a5c 100644
--- a/browser/components/backup/content/debug.html
+++ b/browser/components/backup/content/debug.html
@@ -36,7 +36,25 @@
<section id="controls">
<h2>Controls</h2>
<button id="create-backup">Create backup</button>
+ <p>
+ Clicking "Create backup" will create a backup, and then attempt to
+ show an OS notification with the total time it took to create it. This
+        notification may not appear if your OS has not granted the browser
+        permission to display notifications.
+ </p>
+ <p id="last-backup-status"></p>
<button id="open-backup-folder">Open backups folder</button>
+ <button id="recover-from-staging">
+ Recover from staging folder and launch
+ </button>
+ <p>
+ Clicking "Recover from staging folder and launch" will open a file
+ picker to allow you to select a staging folder. Once selected, a new
+ user profile will be created and the data stores from the staging
+ folder will be copied into that new profile. The new profile will then
+ be launched.
+ </p>
+ <p id="last-recovery-status"></p>
</section>
</main>
diff --git a/browser/components/backup/content/debug.js b/browser/components/backup/content/debug.js
index fd673818c0..7a2cea9640 100644
--- a/browser/components/backup/content/debug.js
+++ b/browser/components/backup/content/debug.js
@@ -26,13 +26,31 @@ let DebugUI = {
}
},
+ secondsToHms(seconds) {
+ let h = Math.floor(seconds / 3600);
+ let m = Math.floor((seconds % 3600) / 60);
+ let s = Math.floor((seconds % 3600) % 60);
+ return `${h}h ${m}m ${s}s`;
+ },
+
async onButtonClick(button) {
switch (button.id) {
case "create-backup": {
let service = BackupService.get();
+ let lastBackupStatus = document.querySelector("#last-backup-status");
+ lastBackupStatus.textContent = "Creating backup...";
+
+ let then = Cu.now();
button.disabled = true;
await service.createBackup();
+ let totalTimeSeconds = (Cu.now() - then) / 1000;
button.disabled = false;
+ new Notification(`Backup created`, {
+ body: `Total time ${this.secondsToHms(totalTimeSeconds)}`,
+ });
+ lastBackupStatus.textContent = `Backup created - total time: ${this.secondsToHms(
+ totalTimeSeconds
+ )}`;
break;
}
case "open-backup-folder": {
@@ -52,6 +70,42 @@ let DebugUI = {
break;
}
+ case "recover-from-staging": {
+ let backupsDir = PathUtils.join(PathUtils.profileDir, "backups");
+ let fp = Cc["@mozilla.org/filepicker;1"].createInstance(
+ Ci.nsIFilePicker
+ );
+ fp.init(
+ window.browsingContext,
+ "Choose a staging folder",
+ Ci.nsIFilePicker.modeGetFolder
+ );
+ fp.displayDirectory = await IOUtils.getDirectory(backupsDir);
+ let result = await new Promise(resolve => fp.open(resolve));
+ if (result == Ci.nsIFilePicker.returnCancel) {
+ break;
+ }
+
+ let path = fp.file.path;
+ let lastRecoveryStatus = document.querySelector(
+ "#last-recovery-status"
+ );
+ lastRecoveryStatus.textContent = "Recovering from backup...";
+
+ let service = BackupService.get();
+ try {
+ let newProfile = await service.recoverFromBackup(
+ path,
+ true /* shouldLaunch */
+ );
+ lastRecoveryStatus.textContent = `Created profile ${newProfile.name} at ${newProfile.rootDir.path}`;
+ } catch (e) {
+            // textContent is a property, not a method; assign the message.
+            lastRecoveryStatus.textContent =
+              `Failed to recover: ${e.message} Check the console for the full exception.`;
+ throw e;
+ }
+ }
}
},
};
diff --git a/browser/components/backup/docs/backup-ui-actors.rst b/browser/components/backup/docs/backup-ui-actors.rst
new file mode 100644
index 0000000000..eafe59d05b
--- /dev/null
+++ b/browser/components/backup/docs/backup-ui-actors.rst
@@ -0,0 +1,22 @@
+==========================
+Backup UI Actors Reference
+==========================
+
+The ``BackupUIParent`` and ``BackupUIChild`` actors allow UI widgets to access
+the current state of the ``BackupService`` and to subscribe to state updates.
+
+UI widgets that want to subscribe to state updates must ensure that they are
+running in a process and on a page that the ``BackupUIParent/BackupUIChild``
+actor pair are registered for, and then fire a ``BackupUI:InitWidget`` event.
+
+It is expected that these UI widgets will respond to having their
+``backupServiceState`` property set.
+
+.. js:autoclass:: BackupUIParent
+ :members:
+ :private-members:
+
+.. js:autoclass:: BackupUIChild
+   :members:
+   :private-members:
+.. js:autoattribute:: BackupUIChild#inittedWidgets
diff --git a/browser/components/backup/docs/index.rst b/browser/components/backup/docs/index.rst
index db9995dad2..fc8751f9d2 100644
--- a/browser/components/backup/docs/index.rst
+++ b/browser/components/backup/docs/index.rst
@@ -12,3 +12,4 @@ into a single file that can be easily restored from.
backup-service
backup-resources
+ backup-ui-actors
diff --git a/browser/components/backup/jar.mn b/browser/components/backup/jar.mn
index 7800962486..94c670afaf 100644
--- a/browser/components/backup/jar.mn
+++ b/browser/components/backup/jar.mn
@@ -7,3 +7,5 @@ browser.jar:
content/browser/backup/debug.html (content/debug.html)
content/browser/backup/debug.js (content/debug.js)
#endif
+ content/browser/backup/BackupManifest.1.schema.json (content/BackupManifest.1.schema.json)
+ content/browser/backup/backup-settings.mjs (content/backup-settings.mjs)
diff --git a/browser/components/backup/moz.build b/browser/components/backup/moz.build
index be548ce81f..853ae1d80d 100644
--- a/browser/components/backup/moz.build
+++ b/browser/components/backup/moz.build
@@ -12,6 +12,14 @@ JAR_MANIFESTS += ["jar.mn"]
SPHINX_TREES["docs"] = "docs"
XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.toml"]
+MARIONETTE_MANIFESTS += ["tests/marionette/manifest.toml"]
+BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.toml"]
+MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.toml"]
+
+FINAL_TARGET_FILES.actors += [
+ "actors/BackupUIChild.sys.mjs",
+ "actors/BackupUIParent.sys.mjs",
+]
EXTRA_JS_MODULES.backup += [
"BackupResources.sys.mjs",
diff --git a/browser/components/backup/resources/AddonsBackupResource.sys.mjs b/browser/components/backup/resources/AddonsBackupResource.sys.mjs
index 83b97ed2f2..29b51b8a7f 100644
--- a/browser/components/backup/resources/AddonsBackupResource.sys.mjs
+++ b/browser/components/backup/resources/AddonsBackupResource.sys.mjs
@@ -16,6 +16,73 @@ export class AddonsBackupResource extends BackupResource {
return false;
}
+ async backup(stagingPath, profilePath = PathUtils.profileDir) {
+ // Files and directories to backup.
+ let toCopy = [
+ "extensions.json",
+ "extension-settings.json",
+ "extension-preferences.json",
+ "addonStartup.json.lz4",
+ "browser-extension-data",
+ "extension-store-permissions",
+ ];
+ await BackupResource.copyFiles(profilePath, stagingPath, toCopy);
+
+ // Backup only the XPIs in the extensions directory.
+ let xpiFiles = [];
+ let extensionsXPIDirectoryPath = PathUtils.join(profilePath, "extensions");
+ let xpiDirectoryChildren = await IOUtils.getChildren(
+ extensionsXPIDirectoryPath,
+ {
+ ignoreAbsent: true,
+ }
+ );
+ for (const childFilePath of xpiDirectoryChildren) {
+ if (childFilePath.endsWith(".xpi")) {
+ let childFileName = PathUtils.filename(childFilePath);
+ xpiFiles.push(childFileName);
+ }
+ }
+ // Create the extensions directory in the staging directory.
+ let stagingExtensionsXPIDirectoryPath = PathUtils.join(
+ stagingPath,
+ "extensions"
+ );
+ await IOUtils.makeDirectory(stagingExtensionsXPIDirectoryPath);
+ // Copy all found XPIs to the staging directory.
+ await BackupResource.copyFiles(
+ extensionsXPIDirectoryPath,
+ stagingExtensionsXPIDirectoryPath,
+ xpiFiles
+ );
+
+ // Copy storage sync database.
+ let databases = ["storage-sync-v2.sqlite"];
+ await BackupResource.copySqliteDatabases(
+ profilePath,
+ stagingPath,
+ databases
+ );
+
+ return null;
+ }
+
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ const files = [
+ "extensions.json",
+ "extension-settings.json",
+ "extension-preferences.json",
+ "addonStartup.json.lz4",
+ "browser-extension-data",
+ "extension-store-permissions",
+ "extensions",
+ "storage-sync-v2.sqlite",
+ ];
+ await BackupResource.copyFiles(recoveryPath, destProfilePath, files);
+
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
// Report the total size of the extension json files.
const jsonFiles = [
@@ -55,16 +122,16 @@ export class AddonsBackupResource extends BackupResource {
Glean.browserBackup.storageSyncSize.set(storageSyncSize);
// Report the total size of XPI files in the extensions directory.
- let extensionsXpiDirectoryPath = PathUtils.join(profilePath, "extensions");
- let extensionsXpiDirectorySize = await BackupResource.getDirectorySize(
- extensionsXpiDirectoryPath,
+ let extensionsXPIDirectoryPath = PathUtils.join(profilePath, "extensions");
+ let extensionsXPIDirectorySize = await BackupResource.getDirectorySize(
+ extensionsXPIDirectoryPath,
{
shouldExclude: (filePath, fileType) =>
fileType !== "regular" || !filePath.endsWith(".xpi"),
}
);
Glean.browserBackup.extensionsXpiDirectorySize.set(
- extensionsXpiDirectorySize
+ extensionsXPIDirectorySize
);
// Report the total size of the browser extension data.
diff --git a/browser/components/backup/resources/BackupResource.sys.mjs b/browser/components/backup/resources/BackupResource.sys.mjs
index d851eb5199..5be6314a60 100644
--- a/browser/components/backup/resources/BackupResource.sys.mjs
+++ b/browser/components/backup/resources/BackupResource.sys.mjs
@@ -2,6 +2,14 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs";
+
+const lazy = {};
+
+ChromeUtils.defineESModuleGetters(lazy, {
+ Sqlite: "resource://gre/modules/Sqlite.sys.mjs",
+});
+
// Convert from bytes to kilobytes (not kibibytes).
export const BYTES_IN_KB = 1000;
@@ -50,6 +58,19 @@ export class BackupResource {
}
/**
+ * This can be overridden to return a number indicating the priority the
+ * resource should have in the backup order.
+ *
+ * Resources with a higher priority will be backed up first.
+ * The default priority of 0 indicates it can be processed in any order.
+ *
+ * @returns {number}
+ */
+ static get priority() {
+ return 0;
+ }
+
+ /**
* Get the size of a file.
*
* @param {string} filePath - path to a file.
@@ -129,6 +150,75 @@ export class BackupResource {
return size;
}
+ /**
+ * Copy a set of SQLite databases safely from a source directory to a
+ * destination directory. A new read-only connection is opened for each
+ * database, and then a backup is created. If the source database does not
+ * exist, it is ignored.
+ *
+ * @param {string} sourcePath
+ * Path to the source directory of the SQLite databases.
+ * @param {string} destPath
+ * Path to the destination directory where the SQLite databases should be
+ * copied to.
+ * @param {Array<string>} sqliteDatabases
+ * An array of filenames of the SQLite databases to copy.
+ * @returns {Promise<undefined>}
+ */
+ static async copySqliteDatabases(sourcePath, destPath, sqliteDatabases) {
+ for (let fileName of sqliteDatabases) {
+ let sourceFilePath = PathUtils.join(sourcePath, fileName);
+
+ if (!(await IOUtils.exists(sourceFilePath))) {
+ continue;
+ }
+
+ let destFilePath = PathUtils.join(destPath, fileName);
+ let connection;
+
+ try {
+ connection = await lazy.Sqlite.openConnection({
+ path: sourceFilePath,
+ readOnly: true,
+ });
+
+ await connection.backup(
+ destFilePath,
+ BackupResource.SQLITE_PAGES_PER_STEP,
+ BackupResource.SQLITE_STEP_DELAY_MS
+ );
+ } finally {
+ await connection?.close();
+ }
+ }
+ }
+
+ /**
+ * A helper function to copy a set of files from a source directory to a
+ * destination directory. Callers should ensure that the source files can be
+ * copied safely before invoking this function. Files that do not exist will
+ * be ignored. Callers that wish to copy SQLite databases should use
+ * copySqliteDatabases() instead.
+ *
+ * @param {string} sourcePath
+ * Path to the source directory of the files to be copied.
+ * @param {string} destPath
+ * Path to the destination directory where the files should be
+ * copied to.
+ * @param {string[]} fileNames
+ * An array of filenames of the files to copy.
+ * @returns {Promise<undefined>}
+ */
+ static async copyFiles(sourcePath, destPath, fileNames) {
+ for (let fileName of fileNames) {
+ let sourceFilePath = PathUtils.join(sourcePath, fileName);
+ let destFilePath = PathUtils.join(destPath, fileName);
+ if (await IOUtils.exists(sourceFilePath)) {
+ await IOUtils.copy(sourceFilePath, destFilePath, { recursive: true });
+ }
+ }
+ }
+
constructor() {}
/**
@@ -144,12 +234,12 @@ export class BackupResource {
}
/**
- * Perform a safe copy of the resource(s) and write them into the backup
- * database. The Promise should resolve with an object that can be serialized
- * to JSON, as it will be written to the manifest file. This same object will
- * be deserialized and passed to restore() when restoring the backup. This
- * object can be null if no additional information is needed to restore the
- * backup.
+ * Perform a safe copy of the datastores that this resource manages and write
+ * them into the backup database. The Promise should resolve with an object
+ * that can be serialized to JSON, as it will be written to the manifest file.
+ * This same object will be deserialized and passed to restore() when
+ * restoring the backup. This object can be null if no additional information
+ * is needed to restore the backup.
*
* @param {string} stagingPath
* The path to the staging folder where copies of the datastores for this
@@ -167,4 +257,72 @@ export class BackupResource {
async backup(stagingPath, profilePath = null) {
throw new Error("BackupResource::backup must be overridden");
}
+
+ /**
+ * Recovers the datastores that this resource manages from a backup archive
+ * that has been decompressed into the recoveryPath. A pre-existing unlocked
+ * user profile should be available to restore into, and destProfilePath
+ * should point at its location on the file system.
+ *
+ * This method is not expected to be running in an app connected to the
+ * destProfilePath. If the BackupResource needs to run some operations
+ * while attached to the recovery profile, it should do that work inside of
+ * postRecovery(). If data needs to be transferred to postRecovery(), it
+ * should be passed as a JSON serializable object in the return value of this
+ * method.
+ *
+ * @see BackupResource.postRecovery()
+ * @param {object|null} manifestEntry
+ * The object that was returned by the backup() method when the backup was
+ * created. This object can be null if no additional information was needed
+ * for recovery.
+ * @param {string} recoveryPath
+ * The path to the resource directory where the backup archive has been
+ * decompressed.
+ * @param {string} destProfilePath
+ * The path to the profile directory where the backup should be restored to.
+ * @returns {Promise<object|null>}
+ * This should return a JSON serializable object that will be passed to
+ * postRecovery() if any data needs to be passed to it. This object can be
+ * null if no additional information is needed for postRecovery().
+ */
+ // eslint-disable-next-line no-unused-vars
+ async recover(manifestEntry, recoveryPath, destProfilePath) {
+ throw new Error("BackupResource::recover must be overridden");
+ }
+
+ /**
+ * Perform any post-recovery operations that need to be done after the
+ * recovery has been completed and the recovered profile has been attached
+ * to.
+ *
+ * This method is running in an app connected to the recovered profile. The
+ * profile is locked, but this postRecovery method can be used to insert
+ * data into connected datastores, or perform any other operations that can
+ * only occur within the context of the recovered profile.
+ *
+ * @see BackupResource.recover()
+ * @param {object|null} postRecoveryEntry
+ * The object that was returned by the recover() method when the recovery
+ * was originally done. This object can be null if no additional information
+ * is needed for post-recovery.
+ */
+ // eslint-disable-next-line no-unused-vars
+ async postRecovery(postRecoveryEntry) {
+ // no-op by default
+ }
}
+
+XPCOMUtils.defineLazyPreferenceGetter(
+ BackupResource,
+ "SQLITE_PAGES_PER_STEP",
+ "browser.backup.sqlite.pages_per_step",
+ 5
+);
+
+XPCOMUtils.defineLazyPreferenceGetter(
+ BackupResource,
+ "SQLITE_STEP_DELAY_MS",
+ "browser.backup.sqlite.step_delay_ms",
+ 250
+);
diff --git a/browser/components/backup/resources/CookiesBackupResource.sys.mjs b/browser/components/backup/resources/CookiesBackupResource.sys.mjs
index 8b988fd532..87ac27757c 100644
--- a/browser/components/backup/resources/CookiesBackupResource.sys.mjs
+++ b/browser/components/backup/resources/CookiesBackupResource.sys.mjs
@@ -16,6 +16,20 @@ export class CookiesBackupResource extends BackupResource {
return true;
}
+ async backup(stagingPath, profilePath = PathUtils.profileDir) {
+ await BackupResource.copySqliteDatabases(profilePath, stagingPath, [
+ "cookies.sqlite",
+ ]);
+ return null;
+ }
+
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ await BackupResource.copyFiles(recoveryPath, destProfilePath, [
+ "cookies.sqlite",
+ ]);
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
let cookiesDBPath = PathUtils.join(profilePath, "cookies.sqlite");
let cookiesSize = await BackupResource.getFileSize(cookiesDBPath);
diff --git a/browser/components/backup/resources/CredentialsAndSecurityBackupResource.sys.mjs b/browser/components/backup/resources/CredentialsAndSecurityBackupResource.sys.mjs
index 89069de826..03a0267f33 100644
--- a/browser/components/backup/resources/CredentialsAndSecurityBackupResource.sys.mjs
+++ b/browser/components/backup/resources/CredentialsAndSecurityBackupResource.sys.mjs
@@ -16,6 +16,42 @@ export class CredentialsAndSecurityBackupResource extends BackupResource {
return true;
}
+ async backup(stagingPath, profilePath = PathUtils.profileDir) {
+ const simpleCopyFiles = [
+ "pkcs11.txt",
+ "logins.json",
+ "logins-backup.json",
+ "autofill-profiles.json",
+ "signedInUser.json",
+ ];
+ await BackupResource.copyFiles(profilePath, stagingPath, simpleCopyFiles);
+
+ const sqliteDatabases = ["cert9.db", "key4.db", "credentialstate.sqlite"];
+ await BackupResource.copySqliteDatabases(
+ profilePath,
+ stagingPath,
+ sqliteDatabases
+ );
+
+ return null;
+ }
+
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ const files = [
+ "pkcs11.txt",
+ "logins.json",
+ "logins-backup.json",
+ "autofill-profiles.json",
+ "signedInUser.json",
+ "cert9.db",
+ "key4.db",
+ "credentialstate.sqlite",
+ ];
+ await BackupResource.copyFiles(recoveryPath, destProfilePath, files);
+
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
const securityFiles = ["cert9.db", "pkcs11.txt"];
let securitySize = 0;
diff --git a/browser/components/backup/resources/FormHistoryBackupResource.sys.mjs b/browser/components/backup/resources/FormHistoryBackupResource.sys.mjs
index cb314eb34d..8e35afc66b 100644
--- a/browser/components/backup/resources/FormHistoryBackupResource.sys.mjs
+++ b/browser/components/backup/resources/FormHistoryBackupResource.sys.mjs
@@ -16,6 +16,22 @@ export class FormHistoryBackupResource extends BackupResource {
return false;
}
+ async backup(stagingPath, profilePath = PathUtils.profileDir) {
+ await BackupResource.copySqliteDatabases(profilePath, stagingPath, [
+ "formhistory.sqlite",
+ ]);
+
+ return null;
+ }
+
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ await BackupResource.copyFiles(recoveryPath, destProfilePath, [
+ "formhistory.sqlite",
+ ]);
+
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
let formHistoryDBPath = PathUtils.join(profilePath, "formhistory.sqlite");
let formHistorySize = await BackupResource.getFileSize(formHistoryDBPath);
diff --git a/browser/components/backup/resources/MiscDataBackupResource.sys.mjs b/browser/components/backup/resources/MiscDataBackupResource.sys.mjs
index 97224f0e31..3d66114599 100644
--- a/browser/components/backup/resources/MiscDataBackupResource.sys.mjs
+++ b/browser/components/backup/resources/MiscDataBackupResource.sys.mjs
@@ -5,11 +5,19 @@
import { BackupResource } from "resource:///modules/backup/BackupResource.sys.mjs";
const lazy = {};
-
ChromeUtils.defineESModuleGetters(lazy, {
- Sqlite: "resource://gre/modules/Sqlite.sys.mjs",
+ ActivityStreamStorage:
+ "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs",
+ ProfileAge: "resource://gre/modules/ProfileAge.sys.mjs",
});
+const SNIPPETS_TABLE_NAME = "snippets";
+const FILES_FOR_BACKUP = [
+ "enumerate_devices.txt",
+ "protections.sqlite",
+ "SiteSecurityServiceState.bin",
+];
+
/**
* Class representing miscellaneous files for telemetry, site storage,
* media device origin mapping, chrome privileged IndexedDB databases,
@@ -25,57 +33,102 @@ export class MiscDataBackupResource extends BackupResource {
}
async backup(stagingPath, profilePath = PathUtils.profileDir) {
- const files = [
- "times.json",
- "enumerate_devices.txt",
- "SiteSecurityServiceState.bin",
- ];
-
- for (let fileName of files) {
- let sourcePath = PathUtils.join(profilePath, fileName);
- let destPath = PathUtils.join(stagingPath, fileName);
- if (await IOUtils.exists(sourcePath)) {
- await IOUtils.copy(sourcePath, destPath, { recursive: true });
- }
- }
+ const files = ["enumerate_devices.txt", "SiteSecurityServiceState.bin"];
+ await BackupResource.copyFiles(profilePath, stagingPath, files);
const sqliteDatabases = ["protections.sqlite"];
-
- for (let fileName of sqliteDatabases) {
- let sourcePath = PathUtils.join(profilePath, fileName);
- let destPath = PathUtils.join(stagingPath, fileName);
- let connection;
-
- try {
- connection = await lazy.Sqlite.openConnection({
- path: sourcePath,
- readOnly: true,
- });
-
- await connection.backup(destPath);
- } finally {
- await connection.close();
- }
- }
+ await BackupResource.copySqliteDatabases(
+ profilePath,
+ stagingPath,
+ sqliteDatabases
+ );
// Bug 1890585 - we don't currently have the ability to copy the
- // chrome-privileged IndexedDB databases under storage/permanent/chrome, so
- // we'll just skip that for now.
+ // chrome-privileged IndexedDB databases under storage/permanent/chrome.
+ // Instead, we'll manually export any IndexedDB data we need to backup
+ // to a separate JSON file.
+
+ // The first IndexedDB database we want to back up is the ActivityStream
+ // one - specifically, the "snippets" table, as this contains information
+ // on ASRouter impressions, blocked messages, message group impressions,
+ // etc.
+ let storage = new lazy.ActivityStreamStorage({
+ storeNames: [SNIPPETS_TABLE_NAME],
+ });
+ let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+ let snippetsObj = {};
+ for (let key of await snippetsTable.getAllKeys()) {
+ snippetsObj[key] = await snippetsTable.get(key);
+ }
+ let snippetsBackupFile = PathUtils.join(
+ stagingPath,
+ "activity-stream-snippets.json"
+ );
+ await IOUtils.writeJSON(snippetsBackupFile, snippetsObj);
return null;
}
- async measure(profilePath = PathUtils.profileDir) {
- const files = [
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ await BackupResource.copyFiles(
+ recoveryPath,
+ destProfilePath,
+ FILES_FOR_BACKUP
+ );
+
+ // The times.json file, the one that powers ProfileAge, works hand in hand
+ // with the Telemetry client ID. We don't want to accidentally _overwrite_
+ // a pre-existing times.json with data from a different profile, because
+ // then the client ID wouldn't match the times.json data anymore.
+ //
+ // The rule that we're following for backups and recoveries is that the
+ // recovered profile always inherits the client ID (and therefore the
+ // times.json) from the profile that _initiated recovery_.
+ //
+ // This means we want to copy the times.json file from the profile that's
+ // currently in use to the destProfilePath.
+ await BackupResource.copyFiles(PathUtils.profileDir, destProfilePath, [
"times.json",
- "enumerate_devices.txt",
- "protections.sqlite",
- "SiteSecurityServiceState.bin",
- ];
+ ]);
+
+ // We also want to write the recoveredFromBackup timestamp now.
+ let profileAge = await lazy.ProfileAge(destProfilePath);
+ await profileAge.recordRecoveredFromBackup();
+
+ // The activity-stream-snippets data will need to be written during the
+ // postRecovery phase, so we'll stash the path to the JSON file in the
+ // post recovery entry.
+ let snippetsBackupFile = PathUtils.join(
+ recoveryPath,
+ "activity-stream-snippets.json"
+ );
+ return { snippetsBackupFile };
+ }
+
+ async postRecovery(postRecoveryEntry) {
+ let { snippetsBackupFile } = postRecoveryEntry;
+ // If for some reason, the activity-stream-snippets data file has been
+ // removed already, there's nothing to do.
+    if (!(await IOUtils.exists(snippetsBackupFile))) {
+ return;
+ }
+
+ let snippetsData = await IOUtils.readJSON(snippetsBackupFile);
+ let storage = new lazy.ActivityStreamStorage({
+ storeNames: [SNIPPETS_TABLE_NAME],
+ });
+ let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+ for (let key in snippetsData) {
+ let value = snippetsData[key];
+ await snippetsTable.set(key, value);
+ }
+ }
+
+ async measure(profilePath = PathUtils.profileDir) {
let fullSize = 0;
- for (let filePath of files) {
+ for (let filePath of FILES_FOR_BACKUP) {
let resourcePath = PathUtils.join(profilePath, filePath);
let resourceSize = await BackupResource.getFileSize(resourcePath);
if (Number.isInteger(resourceSize)) {
diff --git a/browser/components/backup/resources/PlacesBackupResource.sys.mjs b/browser/components/backup/resources/PlacesBackupResource.sys.mjs
index 1955406f51..3a9433e67c 100644
--- a/browser/components/backup/resources/PlacesBackupResource.sys.mjs
+++ b/browser/components/backup/resources/PlacesBackupResource.sys.mjs
@@ -10,7 +10,6 @@ const lazy = {};
ChromeUtils.defineESModuleGetters(lazy, {
BookmarkJSONUtils: "resource://gre/modules/BookmarkJSONUtils.sys.mjs",
PrivateBrowsingUtils: "resource://gre/modules/PrivateBrowsingUtils.sys.mjs",
- Sqlite: "resource://gre/modules/Sqlite.sys.mjs",
});
XPCOMUtils.defineLazyPreferenceGetter(
@@ -26,6 +25,8 @@ XPCOMUtils.defineLazyPreferenceGetter(
false
);
+const BOOKMARKS_BACKUP_FILENAME = "bookmarks.jsonlz4";
+
/**
* Class representing Places database related files within a user profile.
*/
@@ -38,8 +39,11 @@ export class PlacesBackupResource extends BackupResource {
return false;
}
+ static get priority() {
+ return 1;
+ }
+
async backup(stagingPath, profilePath = PathUtils.profileDir) {
- const sqliteDatabases = ["places.sqlite", "favicons.sqlite"];
let canBackupHistory =
!lazy.PrivateBrowsingUtils.permanentPrivateBrowsing &&
!lazy.isSanitizeOnShutdownEnabled &&
@@ -52,7 +56,7 @@ export class PlacesBackupResource extends BackupResource {
if (!canBackupHistory) {
let bookmarksBackupFile = PathUtils.join(
stagingPath,
- "bookmarks.jsonlz4"
+ BOOKMARKS_BACKUP_FILENAME
);
await lazy.BookmarkJSONUtils.exportToFile(bookmarksBackupFile, {
compress: true,
@@ -60,25 +64,60 @@ export class PlacesBackupResource extends BackupResource {
return { bookmarksOnly: true };
}
- for (let fileName of sqliteDatabases) {
- let sourcePath = PathUtils.join(profilePath, fileName);
- let destPath = PathUtils.join(stagingPath, fileName);
- let connection;
+ // These are copied in parallel because they're attached[1], and we don't
+ // want them to get out of sync with one another.
+ //
+ // [1]: https://www.sqlite.org/lang_attach.html
+ await Promise.all([
+ BackupResource.copySqliteDatabases(profilePath, stagingPath, [
+ "places.sqlite",
+ ]),
+ BackupResource.copySqliteDatabases(profilePath, stagingPath, [
+ "favicons.sqlite",
+ ]),
+ ]);
+
+ return null;
+ }
- try {
- connection = await lazy.Sqlite.openConnection({
- path: sourcePath,
- readOnly: true,
- });
+ async recover(manifestEntry, recoveryPath, destProfilePath) {
+ if (!manifestEntry) {
+ const simpleCopyFiles = ["places.sqlite", "favicons.sqlite"];
+ await BackupResource.copyFiles(
+ recoveryPath,
+ destProfilePath,
+ simpleCopyFiles
+ );
+ } else {
+ const { bookmarksOnly } = manifestEntry;
- await connection.backup(destPath);
- } finally {
- await connection.close();
+ /**
+ * If the recovery file only has bookmarks backed up, pass the file path to postRecovery()
+ * so that we can import all bookmarks into the new profile once it's been launched and restored.
+ */
+ if (bookmarksOnly) {
+ let bookmarksBackupPath = PathUtils.join(
+ recoveryPath,
+ BOOKMARKS_BACKUP_FILENAME
+ );
+ return { bookmarksBackupPath };
}
}
+
return null;
}
+ async postRecovery(postRecoveryEntry) {
+ if (postRecoveryEntry?.bookmarksBackupPath) {
+ await lazy.BookmarkJSONUtils.importFromFile(
+ postRecoveryEntry.bookmarksBackupPath,
+ {
+ replace: true,
+ }
+ );
+ }
+ }
+
async measure(profilePath = PathUtils.profileDir) {
let placesDBPath = PathUtils.join(profilePath, "places.sqlite");
let faviconsDBPath = PathUtils.join(profilePath, "favicons.sqlite");
diff --git a/browser/components/backup/resources/PreferencesBackupResource.sys.mjs b/browser/components/backup/resources/PreferencesBackupResource.sys.mjs
index 012c0bf91e..80196cab74 100644
--- a/browser/components/backup/resources/PreferencesBackupResource.sys.mjs
+++ b/browser/components/backup/resources/PreferencesBackupResource.sys.mjs
@@ -3,7 +3,6 @@
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
import { BackupResource } from "resource:///modules/backup/BackupResource.sys.mjs";
-import { Sqlite } from "resource://gre/modules/Sqlite.sys.mjs";
/**
* Class representing files that modify preferences and permissions within a user profile.
@@ -28,32 +27,14 @@ export class PreferencesBackupResource extends BackupResource {
"user.js",
"chrome",
];
-
- for (let fileName of simpleCopyFiles) {
- let sourcePath = PathUtils.join(profilePath, fileName);
- let destPath = PathUtils.join(stagingPath, fileName);
- if (await IOUtils.exists(sourcePath)) {
- await IOUtils.copy(sourcePath, destPath, { recursive: true });
- }
- }
+ await BackupResource.copyFiles(profilePath, stagingPath, simpleCopyFiles);
const sqliteDatabases = ["permissions.sqlite", "content-prefs.sqlite"];
-
- for (let fileName of sqliteDatabases) {
- let sourcePath = PathUtils.join(profilePath, fileName);
- let destPath = PathUtils.join(stagingPath, fileName);
- let connection;
-
- try {
- connection = await Sqlite.openConnection({
- path: sourcePath,
- });
-
- await connection.backup(destPath);
- } finally {
- await connection.close();
- }
- }
+ await BackupResource.copySqliteDatabases(
+ profilePath,
+ stagingPath,
+ sqliteDatabases
+ );
// prefs.js is a special case - we have a helper function to flush the
// current prefs state to disk off of the main thread.
@@ -64,6 +45,27 @@ export class PreferencesBackupResource extends BackupResource {
return null;
}
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ const simpleCopyFiles = [
+ "prefs.js",
+ "xulstore.json",
+ "permissions.sqlite",
+ "content-prefs.sqlite",
+ "containers.json",
+ "handlers.json",
+ "search.json.mozlz4",
+ "user.js",
+ "chrome",
+ ];
+ await BackupResource.copyFiles(
+ recoveryPath,
+ destProfilePath,
+ simpleCopyFiles
+ );
+
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
const files = [
"prefs.js",
diff --git a/browser/components/backup/resources/SessionStoreBackupResource.sys.mjs b/browser/components/backup/resources/SessionStoreBackupResource.sys.mjs
index fa5dcca848..d28598944f 100644
--- a/browser/components/backup/resources/SessionStoreBackupResource.sys.mjs
+++ b/browser/components/backup/resources/SessionStoreBackupResource.sys.mjs
@@ -28,6 +28,32 @@ export class SessionStoreBackupResource extends BackupResource {
return false;
}
+ async backup(stagingPath, profilePath = PathUtils.profileDir) {
+ let sessionStoreState = lazy.SessionStore.getCurrentState(true);
+ let sessionStorePath = PathUtils.join(stagingPath, "sessionstore.jsonlz4");
+
+ /* Bug 1891854 - remove cookies from session store state if the backup file is
+ * not encrypted. */
+
+ await IOUtils.writeJSON(sessionStorePath, sessionStoreState, {
+ compress: true,
+ });
+ await BackupResource.copyFiles(profilePath, stagingPath, [
+ "sessionstore-backups",
+ ]);
+
+ return null;
+ }
+
+ async recover(_manifestEntry, recoveryPath, destProfilePath) {
+ await BackupResource.copyFiles(recoveryPath, destProfilePath, [
+ "sessionstore.jsonlz4",
+ "sessionstore-backups",
+ ]);
+
+ return null;
+ }
+
async measure(profilePath = PathUtils.profileDir) {
// Get the current state of the session store JSON and
// measure it's uncompressed size.
diff --git a/browser/components/backup/tests/browser/browser.toml b/browser/components/backup/tests/browser/browser.toml
new file mode 100644
index 0000000000..f222c3b825
--- /dev/null
+++ b/browser/components/backup/tests/browser/browser.toml
@@ -0,0 +1,7 @@
+[DEFAULT]
+prefs = [
+ "browser.backup.enabled=true",
+ "browser.backup.preferences.ui.enabled=true",
+]
+
+["browser_settings.js"]
diff --git a/browser/components/backup/tests/browser/browser_settings.js b/browser/components/backup/tests/browser/browser_settings.js
new file mode 100644
index 0000000000..b33dbec7bd
--- /dev/null
+++ b/browser/components/backup/tests/browser/browser_settings.js
@@ -0,0 +1,40 @@
+/* Any copyright is dedicated to the Public Domain.
+ https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+/**
+ * Tests that the section for controlling backup in about:preferences is
+ * visible, but can also be hidden via a pref.
+ */
+add_task(async function test_preferences_visibility() {
+ await BrowserTestUtils.withNewTab("about:preferences", async browser => {
+ let backupSection =
+ browser.contentDocument.querySelector("#dataBackupGroup");
+ Assert.ok(backupSection, "Found backup preferences section");
+
+ // Our mochitest-browser tests are configured to have the section visible
+ // by default.
+ Assert.ok(
+ BrowserTestUtils.isVisible(backupSection),
+ "Backup section is visible"
+ );
+ });
+
+ await SpecialPowers.pushPrefEnv({
+ set: [["browser.backup.preferences.ui.enabled", false]],
+ });
+
+ await BrowserTestUtils.withNewTab("about:preferences", async browser => {
+ let backupSection =
+ browser.contentDocument.querySelector("#dataBackupGroup");
+ Assert.ok(backupSection, "Found backup preferences section");
+
+ Assert.ok(
+ BrowserTestUtils.isHidden(backupSection),
+ "Backup section is now hidden"
+ );
+ });
+
+ await SpecialPowers.popPrefEnv();
+});
diff --git a/browser/components/backup/tests/chrome/chrome.toml b/browser/components/backup/tests/chrome/chrome.toml
new file mode 100644
index 0000000000..b0c01b336f
--- /dev/null
+++ b/browser/components/backup/tests/chrome/chrome.toml
@@ -0,0 +1,4 @@
+[DEFAULT]
+skip-if = ["os == 'android'"]
+
+["test_backup_settings.html"]
diff --git a/browser/components/backup/tests/chrome/test_backup_settings.html b/browser/components/backup/tests/chrome/test_backup_settings.html
new file mode 100644
index 0000000000..3619f8a1f4
--- /dev/null
+++ b/browser/components/backup/tests/chrome/test_backup_settings.html
@@ -0,0 +1,43 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Tests for the BackupSettings component</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script src="chrome://mochikit/content/tests/SimpleTest/EventUtils.js"></script>
+ <script
+ src="chrome://browser/content/backup/backup-settings.mjs"
+ type="module"
+></script>
+ <link rel="stylesheet" href="chrome://mochikit/content/tests/SimpleTest/test.css"/>
+ <script>
+
+ const { BrowserTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+ );
+
+ /**
+ * Tests that adding a backup-settings element to the DOM causes it to
+ * fire a BackupUI:InitWidget event.
+ */
+ add_task(async function test_initWidget() {
+ let settings = document.createElement("backup-settings");
+ let content = document.getElementById("content");
+
+ let sawInitWidget = BrowserTestUtils.waitForEvent(content, "BackupUI:InitWidget");
+ content.appendChild(settings);
+ await sawInitWidget;
+ ok(true, "Saw BackupUI:InitWidget");
+
+ settings.remove();
+ });
+ </script>
+</head>
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+ <backup-settings id="test-backup-settings"></backup-settings>
+</div>
+<pre id="test"></pre>
+</body>
+</html>
diff --git a/browser/components/backup/tests/marionette/http2-ca.pem b/browser/components/backup/tests/marionette/http2-ca.pem
new file mode 100644
index 0000000000..ef5a801720
--- /dev/null
+++ b/browser/components/backup/tests/marionette/http2-ca.pem
@@ -0,0 +1,18 @@
+-----BEGIN CERTIFICATE-----
+MIIC1DCCAbygAwIBAgIURZvN7yVqFNwThGHASoy1OlOGvOMwDQYJKoZIhvcNAQEL
+BQAwGTEXMBUGA1UEAwwOIEhUVFAyIFRlc3QgQ0EwIhgPMjAxNzAxMDEwMDAwMDBa
+GA8yMDI3MDEwMTAwMDAwMFowGTEXMBUGA1UEAwwOIEhUVFAyIFRlc3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6iFGoRI4W1kH9braIBjYQPTwT
+2erkNUq07PVoV2wke8HHJajg2B+9sZwGm24ahvJr4q9adWtqZHEIeqVap0WH9xzV
+JJwCfs1D/B5p0DggKZOrIMNJ5Nu5TMJrbA7tFYIP8X6taRqx0wI6iypB7qdw4A8N
+jf1mCyuwJJKkfbmIYXmQsVeQPdI7xeC4SB+oN9OIQ+8nFthVt2Zaqn4CkC86exCA
+BiTMHGyXrZZhW7filhLAdTGjDJHdtMr3/K0dJdMJ77kXDqdo4bN7LyJvaeO0ipVh
+He4m1iWdq5EITjbLHCQELL8Wiy/l8Y+ZFzG4s/5JI/pyUcQx1QOs2hgKNe2NAgMB
+AAGjEDAOMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBADyDiQnKjsvR
+NrOk0aqgJ8XgK/IgJXFLbAVivjBLwnJGEkwxrFtC14mpTrPuXw9AybhroMjinq4Y
+cNYTFuTE34k0fZEU8d60J/Tpfd1i0EB8+oUPuqOn+N29/LeHPAnkDJdOZye3w0U+
+StAI79WqUYQaKIG7qLnt60dQwBte12uvbuPaB3mREIfDXOKcjLBdZHL1waWjtzUX
+z2E91VIdpvJGfEfXC3fIe1uO9Jh/E9NVWci84+njkNsl+OyBfOJ8T+pV3SHfWedp
+Zbjwh6UTukIuc3mW0rS/qZOa2w3HQaO53BMbluo0w1+cscOepsATld2HHvSiHB+0
+K8SWFRHdBOU=
+-----END CERTIFICATE-----
diff --git a/browser/components/backup/tests/marionette/manifest.toml b/browser/components/backup/tests/marionette/manifest.toml
new file mode 100644
index 0000000000..2982adb693
--- /dev/null
+++ b/browser/components/backup/tests/marionette/manifest.toml
@@ -0,0 +1,6 @@
+[DEFAULT]
+run-if = ["buildapp == 'browser'"]
+prefs = ["browser.backup.enabled=true", "browser.backup.log=true"]
+
+["test_backup.py"]
+support-files = ["http2-ca.pem"]
diff --git a/browser/components/backup/tests/marionette/test_backup.py b/browser/components/backup/tests/marionette/test_backup.py
new file mode 100644
index 0000000000..3b11b50ae8
--- /dev/null
+++ b/browser/components/backup/tests/marionette/test_backup.py
@@ -0,0 +1,713 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import json
+import os
+import shutil
+import tempfile
+
+import mozfile
+from marionette_harness import MarionetteTestCase
+
+
+class BackupTest(MarionetteTestCase):
+ # This is the DB key that will be computed for the http2-ca.pem certificate
+ # that's included in a support-file for this test.
+ _cert_db_key = "AAAAAAAAAAAAAAAUAAAAG0Wbze8lahTcE4RhwEqMtTpThrzjMBkxFzAVBgNVBAMMDiBIVFRQMiBUZXN0IENB"
+
+ def setUp(self):
+ MarionetteTestCase.setUp(self)
+ # We need to quit the browser and restart with the browser.backup.log
+ # pref already set to true in order for it to be displayed.
+ self.marionette.quit()
+ self.marionette.instance.prefs = {
+ "browser.backup.log": True,
+ }
+ # Now restart the browser.
+ self.marionette.instance.switch_profile()
+ self.marionette.start_session()
+
+ def test_backup(self):
+ self.marionette.set_context("chrome")
+
+ self.add_test_cookie()
+ self.add_test_login()
+ self.add_test_certificate()
+ self.add_test_saved_address()
+ self.add_test_identity_credential()
+ self.add_test_form_history()
+ self.add_test_activity_stream_snippets_data()
+ self.add_test_protections_data()
+ self.add_test_bookmarks()
+ self.add_test_history()
+ self.add_test_preferences()
+ self.add_test_permissions()
+
+ resourceKeys = self.marionette.execute_script(
+ """
+ const DefaultBackupResources = ChromeUtils.importESModule("resource:///modules/backup/BackupResources.sys.mjs");
+ let resourceKeys = [];
+ for (const resourceName in DefaultBackupResources) {
+ let resource = DefaultBackupResources[resourceName];
+ resourceKeys.push(resource.key);
+ }
+ return resourceKeys;
+ """
+ )
+
+ originalStagingPath = self.marionette.execute_async_script(
+ """
+ const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+ let bs = BackupService.init();
+ if (!bs) {
+ throw new Error("Could not get initialized BackupService.");
+ }
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let { stagingPath } = await bs.createBackup();
+ if (!stagingPath) {
+ throw new Error("Could not create backup.");
+ }
+ return stagingPath;
+ })().then(outerResolve);
+ """
+ )
+
+ # When we switch over to the recovered profile, the Marionette framework
+ # will blow away the profile directory of the one that we created the
+ # backup on, which ruins our ability to do postRecovery work, since
+ # that relies on the prior profile sticking around. We work around this
+ # by moving the staging folder we got back to the OS temporary
+ # directory, and telling the recovery method to use that instead of the
+ # one from the profile directory.
+ stagingPath = os.path.join(tempfile.gettempdir(), "staging-test")
+ # Delete the destination folder if it exists already
+ shutil.rmtree(stagingPath, ignore_errors=True)
+ shutil.move(originalStagingPath, stagingPath)
+
+ # First, ensure that the staging path exists
+ self.assertTrue(os.path.exists(stagingPath))
+ # Now, ensure that the backup-manifest.json file exists within it.
+ manifestPath = os.path.join(stagingPath, "backup-manifest.json")
+ self.assertTrue(os.path.exists(manifestPath))
+
+ # For now, we just do a cursory check to ensure that for the resources
+ # that are listed in the manifest as having been backed up, that we
+ # have at least one file in their respective staging directories.
+ # We don't check the contents of the files, just that they exist.
+
+ # Read the JSON manifest file
+ with open(manifestPath, "r") as f:
+ manifest = json.load(f)
+
+ # Ensure that the manifest has a "resources" key
+ self.assertIn("resources", manifest)
+ resources = manifest["resources"]
+ self.assertTrue(isinstance(resources, dict))
+ self.assertTrue(len(resources) > 0)
+
+ # We don't have encryption capabilities wired up yet, so we'll check
+ # that all default resources are represented in the manifest.
+ self.assertEqual(len(resources), len(resourceKeys))
+ for resourceKey in resourceKeys:
+ self.assertIn(resourceKey, resources)
+
+ # Iterate the resources dict keys
+ for resourceKey in resources:
+ print("Checking resource: %s" % resourceKey)
+ # Ensure that there are staging directories created for each
+ # resource that was backed up
+ resourceStagingDir = os.path.join(stagingPath, resourceKey)
+ self.assertTrue(os.path.exists(resourceStagingDir))
+
+ # Start a brand new profile, one without any of the data we created or
+ # backed up. This is the one that we'll be starting recovery from.
+ self.marionette.quit()
+ self.marionette.instance.profile = None
+ self.marionette.start_session()
+ self.marionette.set_context("chrome")
+
+ # Recover the created backup into a new profile directory. Also get out
+ # the client ID of this profile, because we're going to want to make
+ # sure that this client ID is inherited by the recovered profile.
+ [
+ newProfileName,
+ newProfilePath,
+ expectedClientID,
+ ] = self.marionette.execute_async_script(
+ """
+ const { ClientID } = ChromeUtils.importESModule("resource://gre/modules/ClientID.sys.mjs");
+ const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+ let bs = BackupService.get();
+ if (!bs) {
+ throw new Error("Could not get initialized BackupService.");
+ }
+
+ let [stagingPath, outerResolve] = arguments;
+ (async () => {
+ let newProfileRootPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "recoverFromBackupTest-newProfileRoot"
+ );
+ let newProfile = await bs.recoverFromBackup(stagingPath, false, newProfileRootPath)
+ if (!newProfile) {
+ throw new Error("Could not create recovery profile.");
+ }
+
+ let expectedClientID = await ClientID.getClientID();
+
+ return [newProfile.name, newProfile.rootDir.path, expectedClientID];
+ })().then(outerResolve);
+ """,
+ script_args=[stagingPath],
+ )
+
+ print("Recovery name: %s" % newProfileName)
+ print("Recovery path: %s" % newProfilePath)
+ print("Expected clientID: %s" % expectedClientID)
+
+ self.marionette.quit()
+ originalProfile = self.marionette.instance.profile
+ self.marionette.instance.profile = newProfilePath
+ self.marionette.start_session()
+ self.marionette.set_context("chrome")
+
+ # Ensure that all postRecovery actions have completed.
+ self.marionette.execute_async_script(
+ """
+ const { BackupService } = ChromeUtils.importESModule("resource:///modules/backup/BackupService.sys.mjs");
+ let bs = BackupService.get();
+ if (!bs) {
+ throw new Error("Could not get initialized BackupService.");
+ }
+
+ let [outerResolve] = arguments;
+ (async () => {
+ await bs.postRecoveryComplete;
+ })().then(outerResolve);
+ """
+ )
+
+ self.verify_recovered_test_cookie()
+ self.verify_recovered_test_login()
+ self.verify_recovered_test_certificate()
+ self.verify_recovered_saved_address()
+ self.verify_recovered_identity_credential()
+ self.verify_recovered_form_history()
+ self.verify_recovered_activity_stream_snippets_data()
+ self.verify_recovered_protections_data()
+ self.verify_recovered_bookmarks()
+ self.verify_recovered_history()
+ self.verify_recovered_preferences()
+ self.verify_recovered_permissions()
+
+ # Now also ensure that the recovered profile inherited the client ID
+ # from the profile that initiated recovery.
+ recoveredClientID = self.marionette.execute_async_script(
+ """
+ const { ClientID } = ChromeUtils.importESModule("resource://gre/modules/ClientID.sys.mjs");
+ let [outerResolve] = arguments;
+ (async () => {
+ return ClientID.getClientID();
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(recoveredClientID, expectedClientID)
+
+ # Try not to pollute the profile list by getting rid of the one we just
+ # created.
+ self.marionette.quit()
+ self.marionette.instance.profile = originalProfile
+ self.marionette.start_session()
+ self.marionette.set_context("chrome")
+ self.marionette.execute_script(
+ """
+ let newProfileName = arguments[0];
+ let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+ Ci.nsIToolkitProfileService
+ );
+ let profile = profileSvc.getProfileByName(newProfileName);
+ profile.remove(true);
+ profileSvc.flush();
+ """,
+ script_args=[newProfileName],
+ )
+
+ # Cleanup the staging path that we moved
+ mozfile.remove(stagingPath)
+
+ def add_test_cookie(self):
+ self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ // We'll just add a single cookie, and then make sure that it shows
+ // up on the other side.
+ Services.cookies.removeAll();
+ Services.cookies.add(
+ ".example.com",
+ "/",
+ "first",
+ "one",
+ false,
+ false,
+ false,
+ Date.now() / 1000 + 1,
+ {},
+ Ci.nsICookie.SAMESITE_NONE,
+ Ci.nsICookie.SCHEME_HTTP
+ );
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_test_cookie(self):
+ cookiesLength = self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ let cookies = Services.cookies.getCookiesFromHost("example.com", {});
+ return cookies.length;
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(cookiesLength, 1)
+
+ def add_test_login(self):
+ self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ // Let's start with adding a single password
+ Services.logins.removeAllLogins();
+
+ const nsLoginInfo = new Components.Constructor(
+ "@mozilla.org/login-manager/loginInfo;1",
+ Ci.nsILoginInfo,
+ "init"
+ );
+
+ const login1 = new nsLoginInfo(
+ "https://example.com",
+ "https://example.com",
+ null,
+ "notifyu1",
+ "notifyp1",
+ "user",
+ "pass"
+ );
+ await Services.logins.addLoginAsync(login1);
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_test_login(self):
+ loginsLength = self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ let logins = await Services.logins.searchLoginsAsync({
+ origin: "https://example.com",
+ });
+ return logins.length;
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(loginsLength, 1)
+
+ def add_test_certificate(self):
+ certPath = os.path.join(os.path.dirname(__file__), "http2-ca.pem")
+ self.marionette.execute_async_script(
+ """
+ let [certPath, certDbKey, outerResolve] = arguments;
+ (async () => {
+ const { NetUtil } = ChromeUtils.importESModule(
+ "resource://gre/modules/NetUtil.sys.mjs"
+ );
+
+ let certDb = Cc["@mozilla.org/security/x509certdb;1"].getService(
+ Ci.nsIX509CertDB
+ );
+
+ if (certDb.findCertByDBKey(certDbKey)) {
+ throw new Error("Should not have this certificate yet!");
+ }
+
+ let certFile = await IOUtils.getFile(certPath);
+ let fstream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ fstream.init(certFile, -1, 0, 0);
+ let data = NetUtil.readInputStreamToString(fstream, fstream.available());
+ fstream.close();
+
+ let pem = data.replace(/-----BEGIN CERTIFICATE-----/, "")
+ .replace(/-----END CERTIFICATE-----/, "")
+ .replace(/[\\r\\n]/g, "");
+ let cert = certDb.addCertFromBase64(pem, "CTu,u,u");
+
+ if (cert.dbKey != certDbKey) {
+ throw new Error("The inserted certificate DB key is unexpected.");
+ }
+ })().then(outerResolve);
+ """,
+ script_args=[certPath, self._cert_db_key],
+ )
+
+ def verify_recovered_test_certificate(self):
+ certExists = self.marionette.execute_async_script(
+ """
+ let [certDbKey, outerResolve] = arguments;
+ (async () => {
+ let certDb = Cc["@mozilla.org/security/x509certdb;1"].getService(
+ Ci.nsIX509CertDB
+ );
+ return certDb.findCertByDBKey(certDbKey) != null;
+ })().then(outerResolve);
+ """,
+ script_args=[self._cert_db_key],
+ )
+ self.assertTrue(certExists)
+
+ def add_test_saved_address(self):
+ self.marionette.execute_async_script(
+ """
+ const { formAutofillStorage } = ChromeUtils.importESModule(
+ "resource://autofill/FormAutofillStorage.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ const TEST_ADDRESS_1 = {
+ "given-name": "John",
+ "additional-name": "R.",
+ "family-name": "Smith",
+ organization: "World Wide Web Consortium",
+ "street-address": "32 Vassar Street\\\nMIT Room 32-G524",
+ "address-level2": "Cambridge",
+ "address-level1": "MA",
+ "postal-code": "02139",
+ country: "US",
+ tel: "+15195555555",
+ email: "user@example.com",
+ };
+ await formAutofillStorage.initialize();
+ formAutofillStorage.addresses.removeAll();
+ await formAutofillStorage.addresses.add(TEST_ADDRESS_1);
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_saved_address(self):
+ addressesLength = self.marionette.execute_async_script(
+ """
+ const { formAutofillStorage } = ChromeUtils.importESModule(
+ "resource://autofill/FormAutofillStorage.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ await formAutofillStorage.initialize();
+ let addresses = await formAutofillStorage.addresses.getAll();
+ return addresses.length;
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(addressesLength, 1)
+
+ def add_test_identity_credential(self):
+ self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ let service = Cc["@mozilla.org/browser/identity-credential-storage-service;1"]
+ .getService(Ci.nsIIdentityCredentialStorageService);
+ service.clear();
+
+ let testPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("https://test.com/"),
+ {}
+ );
+ let idpPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("https://idp-test.com/"),
+ {}
+ );
+
+ service.setState(
+ testPrincipal,
+ idpPrincipal,
+ "ID",
+ true,
+ true
+ );
+
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_identity_credential(self):
+ [registered, allowLogout] = self.marionette.execute_async_script(
+ """
+ let [outerResolve] = arguments;
+ (async () => {
+ let service = Cc["@mozilla.org/browser/identity-credential-storage-service;1"]
+ .getService(Ci.nsIIdentityCredentialStorageService);
+
+ let testPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("https://test.com/"),
+ {}
+ );
+ let idpPrincipal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("https://idp-test.com/"),
+ {}
+ );
+
+ let registered = {};
+ let allowLogout = {};
+
+ service.getState(
+ testPrincipal,
+ idpPrincipal,
+ "ID",
+ registered,
+ allowLogout
+ );
+
+ return [registered.value, allowLogout.value];
+ })().then(outerResolve);
+ """
+ )
+ self.assertTrue(registered)
+ self.assertTrue(allowLogout)
+
+ def add_test_form_history(self):
+ self.marionette.execute_async_script(
+ """
+ const { FormHistory } = ChromeUtils.importESModule(
+ "resource://gre/modules/FormHistory.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ await FormHistory.update({
+ op: "add",
+ fieldname: "some-test-field",
+ value: "I was recovered!",
+ timesUsed: 1,
+ firstUsed: 0,
+ lastUsed: 0,
+ });
+
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_form_history(self):
+ formHistoryResultsLength = self.marionette.execute_async_script(
+ """
+ const { FormHistory } = ChromeUtils.importESModule(
+ "resource://gre/modules/FormHistory.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let results = await FormHistory.search(
+ ["guid"],
+ { fieldname: "some-test-field" }
+ );
+ return results.length;
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(formHistoryResultsLength, 1)
+
+ def add_test_activity_stream_snippets_data(self):
+ self.marionette.execute_async_script(
+ """
+ const { ActivityStreamStorage } = ChromeUtils.importESModule(
+ "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs",
+ );
+ const SNIPPETS_TABLE_NAME = "snippets";
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let storage = new ActivityStreamStorage({
+ storeNames: [SNIPPETS_TABLE_NAME],
+ });
+ let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+ await snippetsTable.set("backup-test", "some-test-value");
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_activity_stream_snippets_data(self):
+ snippetsResult = self.marionette.execute_async_script(
+ """
+ const { ActivityStreamStorage } = ChromeUtils.importESModule(
+ "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs",
+ );
+ const SNIPPETS_TABLE_NAME = "snippets";
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let storage = new ActivityStreamStorage({
+ storeNames: [SNIPPETS_TABLE_NAME],
+ });
+ let snippetsTable = await storage.getDbTable(SNIPPETS_TABLE_NAME);
+ return await snippetsTable.get("backup-test");
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(snippetsResult, "some-test-value")
+
+ def add_test_protections_data(self):
+ self.marionette.execute_async_script(
+ """
+ const TrackingDBService = Cc["@mozilla.org/tracking-db-service;1"]
+ .getService(Ci.nsITrackingDBService);
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let entry = {
+ "https://test.com": [
+ [Ci.nsIWebProgressListener.STATE_BLOCKED_TRACKING_CONTENT, true, 1],
+ ],
+ };
+ await TrackingDBService.clearAll();
+ await TrackingDBService.saveEvents(JSON.stringify(entry));
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_protections_data(self):
+ eventsSum = self.marionette.execute_async_script(
+ """
+ const TrackingDBService = Cc["@mozilla.org/tracking-db-service;1"]
+ .getService(Ci.nsITrackingDBService);
+
+ let [outerResolve] = arguments;
+ (async () => {
+ return TrackingDBService.sumAllEvents();
+ })().then(outerResolve);
+ """
+ )
+ self.assertEqual(eventsSum, 1)
+
+ def add_test_bookmarks(self):
+ self.marionette.execute_async_script(
+ """
+ const { PlacesUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PlacesUtils.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ await PlacesUtils.bookmarks.eraseEverything();
+ await PlacesUtils.bookmarks.insert({
+ parentGuid: PlacesUtils.bookmarks.toolbarGuid,
+ title: "Some test page",
+ url: Services.io.newURI("https://www.backup.test/"),
+ });
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_bookmarks(self):
+ bookmarkExists = self.marionette.execute_async_script(
+ """
+ const { PlacesUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PlacesUtils.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let url = Services.io.newURI("https://www.backup.test/");
+ let bookmark = await PlacesUtils.bookmarks.fetch({ url });
+ return bookmark != null;
+ })().then(outerResolve);
+ """
+ )
+ self.assertTrue(bookmarkExists)
+
+ def add_test_history(self):
+ self.marionette.execute_async_script(
+ """
+ const { PlacesUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PlacesUtils.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ await PlacesUtils.history.clear();
+
+ let entry = {
+ url: "http://my-restored-history.com",
+ visits: [{ transition: PlacesUtils.history.TRANSITION_LINK }],
+ };
+
+ await PlacesUtils.history.insertMany([entry]);
+ })().then(outerResolve);
+ """
+ )
+
+ def verify_recovered_history(self):
+ historyExists = self.marionette.execute_async_script(
+ """
+ const { PlacesUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/PlacesUtils.sys.mjs"
+ );
+
+ let [outerResolve] = arguments;
+ (async () => {
+ let entry = await PlacesUtils.history.fetch("http://my-restored-history.com");
+ return entry != null;
+ })().then(outerResolve);
+ """
+ )
+ self.assertTrue(historyExists)
+
+ def add_test_preferences(self):
+ self.marionette.execute_script(
+ """
+ Services.prefs.setBoolPref("test-pref-for-backup", true)
+ """
+ )
+
+ def verify_recovered_preferences(self):
+ prefExists = self.marionette.execute_script(
+ """
+ return Services.prefs.getBoolPref("test-pref-for-backup", false);
+ """
+ )
+ self.assertTrue(prefExists)
+
+ def add_test_permissions(self):
+ self.marionette.execute_script(
+ """
+ let principal = Services.scriptSecurityManager.createContentPrincipalFromOrigin(
+ "https://test-permission-site.com"
+ );
+ Services.perms.addFromPrincipal(
+ principal,
+ "desktop-notification",
+ Services.perms.ALLOW_ACTION
+ );
+ """
+ )
+
+ def verify_recovered_permissions(self):
+ permissionExists = self.marionette.execute_script(
+ """
+ let principal = Services.scriptSecurityManager.createContentPrincipalFromOrigin(
+ "https://test-permission-site.com"
+ );
+ let perms = Services.perms.getAllForPrincipal(principal);
+ if (perms.length != 1) {
+ throw new Error("Got an unexpected number of permissions");
+ }
+ return perms[0].type == "desktop-notification"
+ """
+ )
+ self.assertTrue(permissionExists)
diff --git a/browser/components/backup/tests/xpcshell/data/test_xulstore.json b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
index 0d0890ab16..e4ae6f1f66 100644
--- a/browser/components/backup/tests/xpcshell/data/test_xulstore.json
+++ b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
@@ -9,7 +9,6 @@
"sizemode": "normal"
},
"sidebar-box": {
- "sidebarcommand": "viewBookmarksSidebar",
"width": "323",
"style": "width: 323px;"
},
diff --git a/browser/components/backup/tests/xpcshell/head.js b/browser/components/backup/tests/xpcshell/head.js
index 2402870a13..e5ed32fb63 100644
--- a/browser/components/backup/tests/xpcshell/head.js
+++ b/browser/components/backup/tests/xpcshell/head.js
@@ -50,6 +50,9 @@ class FakeBackupResource2 extends BackupResource {
static get requiresEncryption() {
return true;
}
+ static get priority() {
+ return 1;
+ }
}
/**
@@ -62,6 +65,9 @@ class FakeBackupResource3 extends BackupResource {
static get requiresEncryption() {
return false;
}
+ static get priority() {
+ return 2;
+ }
}
/**
diff --git a/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
new file mode 100644
index 0000000000..d1c47ecdb0
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
@@ -0,0 +1,416 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { AddonsBackupResource } = ChromeUtils.importESModule(
+ "resource:///modules/backup/AddonsBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the size of all the addons & extensions data.
+ */
+add_task(async function test_measure() {
+ Services.fog.testResetFOG();
+ Services.telemetry.clearScalars();
+
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500;
+ const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000;
+ const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100;
+ const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200;
+
+ let tempDir = PathUtils.tempDir;
+
+ // Create extensions json files (all the same size).
+ const extensionsFilePath = PathUtils.join(tempDir, "extensions.json");
+ await createKilobyteSizedFile(
+ extensionsFilePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+ );
+ const extensionSettingsFilePath = PathUtils.join(
+ tempDir,
+ "extension-settings.json"
+ );
+ await createKilobyteSizedFile(
+ extensionSettingsFilePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+ );
+ const extensionsPrefsFilePath = PathUtils.join(
+ tempDir,
+ "extension-preferences.json"
+ );
+ await createKilobyteSizedFile(
+ extensionsPrefsFilePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+ );
+ const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4");
+ await createKilobyteSizedFile(
+ addonStartupFilePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+ );
+
+ // Create the extension store permissions data file.
+ let extensionStorePermissionsDataSize = PathUtils.join(
+ tempDir,
+ "extension-store-permissions",
+ "data.safe.bin"
+ );
+ await createKilobyteSizedFile(
+ extensionStorePermissionsDataSize,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE
+ );
+
+ // Create the storage sync database file.
+ let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite");
+ await createKilobyteSizedFile(
+ storageSyncPath,
+ EXPECTED_KILOBYTES_FOR_STORAGE_SYNC
+ );
+
+ // Create the extensions directory with XPI files.
+ let extensionsXPIAPath = PathUtils.join(
+ tempDir,
+ "extensions",
+ "extension-b.xpi"
+ );
+ let extensionsXPIBPath = PathUtils.join(
+ tempDir,
+ "extensions",
+ "extension-a.xpi"
+ );
+ await createKilobyteSizedFile(
+ extensionsXPIAPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A
+ );
+ await createKilobyteSizedFile(
+ extensionsXPIBPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B
+ );
+ // Should be ignored.
+ let extensionsXPIStagedPath = PathUtils.join(
+ tempDir,
+ "extensions",
+ "staged",
+ "staged-test-extension.xpi"
+ );
+ let extensionsXPITrashPath = PathUtils.join(
+ tempDir,
+ "extensions",
+ "trash",
+ "trashed-test-extension.xpi"
+ );
+ let extensionsXPIUnpackedPath = PathUtils.join(
+ tempDir,
+ "extensions",
+ "unpacked-extension.xpi",
+ "manifest.json"
+ );
+ await createKilobyteSizedFile(
+ extensionsXPIStagedPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+ );
+ await createKilobyteSizedFile(
+ extensionsXPITrashPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+ );
+ await createKilobyteSizedFile(
+ extensionsXPIUnpackedPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
+ );
+
+ // Create the browser extension data directory.
+ let browserExtensionDataPath = PathUtils.join(
+ tempDir,
+ "browser-extension-data",
+ "test-file"
+ );
+ await createKilobyteSizedFile(
+ browserExtensionDataPath,
+ EXPECTED_KILOBYTES_FOR_EXTENSION_DATA
+ );
+
+ // Create the extensions storage directory.
+ let extensionsStoragePath = PathUtils.join(
+ tempDir,
+ "storage",
+ "default",
+ "moz-extension+++test-extension-id",
+ "idb",
+ "data.sqlite"
+ );
+ // Other storage files that should not be counted.
+ let otherStoragePath = PathUtils.join(
+ tempDir,
+ "storage",
+ "default",
+ "https+++accounts.firefox.com",
+ "ls",
+ "data.sqlite"
+ );
+
+ await createKilobyteSizedFile(
+ extensionsStoragePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
+ );
+ await createKilobyteSizedFile(
+ otherStoragePath,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
+ );
+
+ // Measure all the extensions data.
+ let extensionsBackupResource = new AddonsBackupResource();
+ await extensionsBackupResource.measure(tempDir);
+
+ let extensionsJsonSizeMeasurement =
+ Glean.browserBackup.extensionsJsonSize.testGetValue();
+ Assert.equal(
+ extensionsJsonSizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files.
+ "Should have collected the correct measurement of the total size of all extensions JSON files"
+ );
+
+ let extensionStorePermissionsDataSizeMeasurement =
+ Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
+ Assert.equal(
+ extensionStorePermissionsDataSizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE,
+ "Should have collected the correct measurement of the size of the extension store permissions data"
+ );
+
+ let storageSyncSizeMeasurement =
+ Glean.browserBackup.storageSyncSize.testGetValue();
+ Assert.equal(
+ storageSyncSizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_STORAGE_SYNC,
+ "Should have collected the correct measurement of the size of the storage sync database"
+ );
+
+ let extensionsXPIDirectorySizeMeasurement =
+ Glean.browserBackup.extensionsXpiDirectorySize.testGetValue();
+ Assert.equal(
+ extensionsXPIDirectorySizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY,
+ "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory"
+ );
+
+ let browserExtensionDataSizeMeasurement =
+ Glean.browserBackup.browserExtensionDataSize.testGetValue();
+ Assert.equal(
+ browserExtensionDataSizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_EXTENSION_DATA,
+ "Should have collected the correct measurement of the size of the browser extension data directory"
+ );
+
+ let extensionsStorageSizeMeasurement =
+ Glean.browserBackup.extensionsStorageSize.testGetValue();
+ Assert.equal(
+ extensionsStorageSizeMeasurement,
+ EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE,
+ "Should have collected the correct measurement of all the extensions storage"
+ );
+
+ // Compare glean vs telemetry measurements
+ let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.extensions_json_size",
+ extensionsJsonSizeMeasurement,
+ "Glean and telemetry measurements for extensions JSON should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.extension_store_permissions_data_size",
+ extensionStorePermissionsDataSizeMeasurement,
+ "Glean and telemetry measurements for extension store permissions data should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.storage_sync_size",
+ storageSyncSizeMeasurement,
+ "Glean and telemetry measurements for storage sync database should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.extensions_xpi_directory_size",
+ extensionsXPIDirectorySizeMeasurement,
+ "Glean and telemetry measurements for extensions directory should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.browser_extension_data_size",
+ browserExtensionDataSizeMeasurement,
+ "Glean and telemetry measurements for browser extension data should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.extensions_storage_size",
+ extensionsStorageSizeMeasurement,
+ "Glean and telemetry measurements for extensions storage should be equal"
+ );
+
+ await maybeRemovePath(tempDir);
+});
+
+/**
+ * Tests that we can handle the extension store permissions data
+ * and moz-extension IndexedDB databases not existing.
+ */
+add_task(async function test_measure_missing_data() {
+ Services.fog.testResetFOG();
+
+ let tempDir = PathUtils.tempDir;
+
+ let extensionsBackupResource = new AddonsBackupResource();
+ await extensionsBackupResource.measure(tempDir);
+
+ let extensionStorePermissionsDataSizeMeasurement =
+ Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
+ Assert.equal(
+ extensionStorePermissionsDataSizeMeasurement,
+ null,
+ "Should NOT have collected a measurement for the missing permissions data"
+ );
+
+ let extensionsStorageSizeMeasurement =
+ Glean.browserBackup.extensionsStorageSize.testGetValue();
+ Assert.equal(
+ extensionsStorageSizeMeasurement,
+ null,
+ "Should NOT have collected a measurement for the missing storage data"
+ );
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+ let sandbox = sinon.createSandbox();
+
+ let addonsBackupResource = new AddonsBackupResource();
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "AddonsBackupResource-source-test"
+ );
+ let stagingPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "AddonsBackupResource-staging-test"
+ );
+
+ const simpleCopyFiles = [
+ { path: "extensions.json" },
+ { path: "extension-settings.json" },
+ { path: "extension-preferences.json" },
+ { path: "addonStartup.json.lz4" },
+ {
+ path: [
+ "browser-extension-data",
+ "{11aa1234-f111-1234-abcd-a9b8c7654d32}",
+ ],
+ },
+ { path: ["extension-store-permissions", "data.safe.bin"] },
+ { path: ["extensions", "{11aa1234-f111-1234-abcd-a9b8c7654d32}.xpi"] },
+ ];
+ await createTestFiles(sourcePath, simpleCopyFiles);
+
+ const junkFiles = [{ path: ["extensions", "junk"] }];
+ await createTestFiles(sourcePath, junkFiles);
+
+ // Create a fake storage-sync-v2 database file. We don't expect this to
+ // be copied to the staging directory in this test due to our stubbing
+ // of the backup method, so we don't include it in `simpleCopyFiles`.
+ await createTestFiles(sourcePath, [{ path: "storage-sync-v2.sqlite" }]);
+
+ let fakeConnection = {
+ backup: sandbox.stub().resolves(true),
+ close: sandbox.stub().resolves(true),
+ };
+ sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+ let manifestEntry = await addonsBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "AddonsBackupResource.backup should return null as its ManifestEntry"
+ );
+
+ await assertFilesExist(stagingPath, simpleCopyFiles);
+
+ let junkFile = PathUtils.join(stagingPath, "extensions", "junk");
+ Assert.equal(
+ await IOUtils.exists(junkFile),
+ false,
+ `${junkFile} should not exist in the staging folder`
+ );
+
+ // Make sure storage-sync-v2 database is backed up.
+ Assert.ok(
+ fakeConnection.backup.calledOnce,
+ "Called backup the expected number of times for all connections"
+ );
+ Assert.ok(
+ fakeConnection.backup.calledWith(
+ PathUtils.join(stagingPath, "storage-sync-v2.sqlite")
+ ),
+ "Called backup on the storage-sync-v2 Sqlite connection"
+ );
+
+ await maybeRemovePath(stagingPath);
+ await maybeRemovePath(sourcePath);
+
+ sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let addonsBackupResource = new AddonsBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "addonsBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "addonsBackupResource-test-profile"
+ );
+
+ const files = [
+ { path: "extensions.json" },
+ { path: "extension-settings.json" },
+ { path: "extension-preferences.json" },
+ { path: "addonStartup.json.lz4" },
+ { path: "storage-sync-v2.sqlite" },
+ { path: ["browser-extension-data", "addon@darkreader.org.xpi", "data"] },
+ { path: ["extensions", "addon@darkreader.org.xpi"] },
+ { path: ["extension-store-permissions", "data.safe.bin"] },
+ ];
+ await createTestFiles(recoveryPath, files);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await addonsBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "AddonsBackupResource.recover should return null as its post " +
+ "recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, files);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupResource.js b/browser/components/backup/tests/xpcshell/test_BackupResource.js
index 6623f4cd77..42cda918f9 100644
--- a/browser/components/backup/tests/xpcshell/test_BackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_BackupResource.js
@@ -31,7 +31,8 @@ add_task(async function test_getFileSize() {
});
/**
- * Tests that BackupService.getDirectorySize will get the total size of all the files in a directory and it's children in kilobytes.
+ * Tests that BackupService.getDirectorySize will get the total size of all the
+ * files in a directory and its children in kilobytes.
*/
add_task(async function test_getDirectorySize() {
let file = do_get_file("data/test_xulstore.json");
@@ -75,3 +76,175 @@ add_task(async function test_bytesToFuzzyKilobytes() {
Assert.equal(smallSize, 1, "Sizes under 10 kilobytes return 1 kilobyte");
});
+
+/**
+ * Tests that BackupResource.copySqliteDatabases will call `backup` on a new
+ * read-only connection on each database file.
+ */
+add_task(async function test_copySqliteDatabases() {
+ let sandbox = sinon.createSandbox();
+ const SQLITE_PAGES_PER_STEP_PREF = "browser.backup.sqlite.pages_per_step";
+ const SQLITE_STEP_DELAY_MS_PREF = "browser.backup.sqlite.step_delay_ms";
+ const DEFAULT_SQLITE_PAGES_PER_STEP = Services.prefs.getIntPref(
+ SQLITE_PAGES_PER_STEP_PREF
+ );
+ const DEFAULT_SQLITE_STEP_DELAY_MS = Services.prefs.getIntPref(
+ SQLITE_STEP_DELAY_MS_PREF
+ );
+
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "BackupResource-source-test"
+ );
+ let destPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "BackupResource-dest-test"
+ );
+ let pretendDatabases = ["places.sqlite", "favicons.sqlite"];
+ await createTestFiles(
+ sourcePath,
+ pretendDatabases.map(f => ({ path: f }))
+ );
+
+ let fakeConnection = {
+ backup: sandbox.stub().resolves(true),
+ close: sandbox.stub().resolves(true),
+ };
+ sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+ await BackupResource.copySqliteDatabases(
+ sourcePath,
+ destPath,
+ pretendDatabases
+ );
+
+ Assert.ok(
+ Sqlite.openConnection.calledTwice,
+ "Sqlite.openConnection called twice"
+ );
+ Assert.ok(
+ Sqlite.openConnection.firstCall.calledWith({
+ path: PathUtils.join(sourcePath, "places.sqlite"),
+ readOnly: true,
+ }),
+ "openConnection called with places.sqlite as read-only"
+ );
+ Assert.ok(
+ Sqlite.openConnection.secondCall.calledWith({
+ path: PathUtils.join(sourcePath, "favicons.sqlite"),
+ readOnly: true,
+ }),
+ "openConnection called with favicons.sqlite as read-only"
+ );
+
+ Assert.ok(
+ fakeConnection.backup.calledTwice,
+ "backup on an Sqlite connection called twice"
+ );
+ Assert.ok(
+ fakeConnection.backup.firstCall.calledWith(
+ PathUtils.join(destPath, "places.sqlite"),
+ DEFAULT_SQLITE_PAGES_PER_STEP,
+ DEFAULT_SQLITE_STEP_DELAY_MS
+ ),
+ "backup called with places.sqlite to the destination path with the right " +
+ "pages per step and step delay"
+ );
+ Assert.ok(
+ fakeConnection.backup.secondCall.calledWith(
+ PathUtils.join(destPath, "favicons.sqlite"),
+ DEFAULT_SQLITE_PAGES_PER_STEP,
+ DEFAULT_SQLITE_STEP_DELAY_MS
+ ),
+ "backup called with favicons.sqlite to the destination path with the " +
+ "right pages per step and step delay"
+ );
+
+ Assert.ok(
+ fakeConnection.close.calledTwice,
+ "close on an Sqlite connection called twice"
+ );
+
+ // Now check that we can override the default pages per step and step delay.
+ fakeConnection.backup.resetHistory();
+ const NEW_SQLITE_PAGES_PER_STEP = 10;
+ const NEW_SQLITE_STEP_DELAY_MS = 500;
+ Services.prefs.setIntPref(
+ SQLITE_PAGES_PER_STEP_PREF,
+ NEW_SQLITE_PAGES_PER_STEP
+ );
+ Services.prefs.setIntPref(
+ SQLITE_STEP_DELAY_MS_PREF,
+ NEW_SQLITE_STEP_DELAY_MS
+ );
+ await BackupResource.copySqliteDatabases(
+ sourcePath,
+ destPath,
+ pretendDatabases
+ );
+ Assert.ok(
+ fakeConnection.backup.calledTwice,
+ "backup on an Sqlite connection called twice"
+ );
+ Assert.ok(
+ fakeConnection.backup.firstCall.calledWith(
+ PathUtils.join(destPath, "places.sqlite"),
+ NEW_SQLITE_PAGES_PER_STEP,
+ NEW_SQLITE_STEP_DELAY_MS
+ ),
+ "backup called with places.sqlite to the destination path with the right " +
+ "pages per step and step delay"
+ );
+ Assert.ok(
+ fakeConnection.backup.secondCall.calledWith(
+ PathUtils.join(destPath, "favicons.sqlite"),
+ NEW_SQLITE_PAGES_PER_STEP,
+ NEW_SQLITE_STEP_DELAY_MS
+ ),
+ "backup called with favicons.sqlite to the destination path with the " +
+ "right pages per step and step delay"
+ );
+
+ await maybeRemovePath(sourcePath);
+ await maybeRemovePath(destPath);
+ sandbox.restore();
+});
+
+/**
+ * Tests that BackupResource.copyFiles will copy files from one directory to
+ * another.
+ */
+add_task(async function test_copyFiles() {
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "BackupResource-source-test"
+ );
+ let destPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "BackupResource-dest-test"
+ );
+
+ const testFiles = [
+ { path: "file1.txt" },
+ { path: ["some", "nested", "file", "file2.txt"] },
+ { path: "file3.txt" },
+ ];
+
+ await createTestFiles(sourcePath, testFiles);
+
+ await BackupResource.copyFiles(sourcePath, destPath, [
+ "file1.txt",
+ "some",
+ "file3.txt",
+ "does-not-exist.txt",
+ ]);
+
+ await assertFilesExist(destPath, testFiles);
+ Assert.ok(
+ !(await IOUtils.exists(PathUtils.join(destPath, "does-not-exist.txt"))),
+ "does-not-exist.txt wasn't somehow written to."
+ );
+
+ await maybeRemovePath(sourcePath);
+ await maybeRemovePath(destPath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupService.js b/browser/components/backup/tests/xpcshell/test_BackupService.js
new file mode 100644
index 0000000000..33fb9fbb99
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_BackupService.js
@@ -0,0 +1,451 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+const { JsonSchemaValidator } = ChromeUtils.importESModule(
+ "resource://gre/modules/components-utils/JsonSchemaValidator.sys.mjs"
+);
+const { UIState } = ChromeUtils.importESModule(
+ "resource://services-sync/UIState.sys.mjs"
+);
+const { ClientID } = ChromeUtils.importESModule(
+ "resource://gre/modules/ClientID.sys.mjs"
+);
+
+add_setup(function () {
+ // Much of this setup is copied from toolkit/profile/xpcshell/head.js. It is
+ // needed in order to put the xpcshell test environment into the state where
+ // it thinks its profile is the one pointed at by
+ // nsIToolkitProfileService.currentProfile.
+ let gProfD = do_get_profile();
+ let gDataHome = gProfD.clone();
+ gDataHome.append("data");
+ gDataHome.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755);
+ let gDataHomeLocal = gProfD.clone();
+ gDataHomeLocal.append("local");
+ gDataHomeLocal.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755);
+
+ let xreDirProvider = Cc["@mozilla.org/xre/directory-provider;1"].getService(
+ Ci.nsIXREDirProvider
+ );
+ xreDirProvider.setUserDataDirectory(gDataHome, false);
+ xreDirProvider.setUserDataDirectory(gDataHomeLocal, true);
+
+ let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService(
+ Ci.nsIToolkitProfileService
+ );
+
+ let createdProfile = {};
+ let didCreate = profileSvc.selectStartupProfile(
+ ["xpcshell"],
+ false,
+ AppConstants.UPDATE_CHANNEL,
+ "",
+ {},
+ {},
+ createdProfile
+ );
+ Assert.ok(didCreate, "Created a testing profile and set it to current.");
+ Assert.equal(
+ profileSvc.currentProfile,
+ createdProfile.value,
+ "Profile set to current"
+ );
+});
+
+/**
+ * A utility function for testing BackupService.createBackup. This helper
+ * function:
+ *
+ * 1. Ensures that `backup` will be called on BackupResources with the service
+ * 2. Ensures that a backup-manifest.json will be written and contain the
+ * ManifestEntry data returned by each BackupResource.
+ * 3. Ensures that a `staging` folder will be written to and renamed properly
+ * once the backup creation is complete.
+ *
+ * Once this is done, a task function can be run. The task function is passed
+ * the parsed backup-manifest.json object as its only argument.
+ *
+ * @param {object} sandbox
+ * The Sinon sandbox to be used for stubs and mocks. The test using this helper
+ * is responsible for creating and resetting this sandbox.
+ * @param {Function} taskFn
+ * A function that is run once all default checks are done on the manifest
+ * and staging folder. After this function returns, the staging folder will
+ * be cleaned up.
+ * @returns {Promise<undefined>}
+ */
+async function testCreateBackupHelper(sandbox, taskFn) {
+ const EXPECTED_CLIENT_ID = await ClientID.getClientID();
+
+ let fake1ManifestEntry = { fake1: "hello from 1" };
+ sandbox
+ .stub(FakeBackupResource1.prototype, "backup")
+ .resolves(fake1ManifestEntry);
+
+ sandbox
+ .stub(FakeBackupResource2.prototype, "backup")
+ .rejects(new Error("Some failure to backup"));
+
+ let fake3ManifestEntry = { fake3: "hello from 3" };
+ sandbox
+ .stub(FakeBackupResource3.prototype, "backup")
+ .resolves(fake3ManifestEntry);
+
+ let bs = new BackupService({
+ FakeBackupResource1,
+ FakeBackupResource2,
+ FakeBackupResource3,
+ });
+
+ let fakeProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "createBackupTest"
+ );
+
+ await bs.createBackup({ profilePath: fakeProfilePath });
+
+ // We expect the staging folder to exist then be renamed under the fakeProfilePath.
+ // We should also find a folder for each fake BackupResource.
+ let backupsFolderPath = PathUtils.join(fakeProfilePath, "backups");
+ let stagingPath = PathUtils.join(backupsFolderPath, "staging");
+
+ // For now, we expect a single backup only to be saved.
+ let backups = await IOUtils.getChildren(backupsFolderPath);
+ Assert.equal(
+ backups.length,
+ 1,
+ "There should only be 1 backup in the backups folder"
+ );
+
+ let renamedFilename = await PathUtils.filename(backups[0]);
+ let expectedFormatRegex = /^\d{4}(-\d{2}){2}T(\d{2}-){2}\d{2}Z$/;
+ Assert.ok(
+ renamedFilename.match(expectedFormatRegex),
+ "Renamed staging folder should have format YYYY-MM-DDTHH-mm-ssZ"
+ );
+
+ let stagingPathRenamed = PathUtils.join(backupsFolderPath, renamedFilename);
+
+ for (let backupResourceClass of [
+ FakeBackupResource1,
+ FakeBackupResource2,
+ FakeBackupResource3,
+ ]) {
+ let expectedResourceFolderBeforeRename = PathUtils.join(
+ stagingPath,
+ backupResourceClass.key
+ );
+ let expectedResourceFolderAfterRename = PathUtils.join(
+ stagingPathRenamed,
+ backupResourceClass.key
+ );
+
+ Assert.ok(
+ await IOUtils.exists(expectedResourceFolderAfterRename),
+ `BackupResource folder exists for ${backupResourceClass.key} after rename`
+ );
+ Assert.ok(
+ backupResourceClass.prototype.backup.calledOnce,
+ `Backup was called for ${backupResourceClass.key}`
+ );
+ Assert.ok(
+ backupResourceClass.prototype.backup.calledWith(
+ expectedResourceFolderBeforeRename,
+ fakeProfilePath
+ ),
+ `Backup was called in the staging folder for ${backupResourceClass.key} before rename`
+ );
+ }
+
+ // Check that resources were called from highest to lowest backup priority.
+ sinon.assert.callOrder(
+ FakeBackupResource3.prototype.backup,
+ FakeBackupResource2.prototype.backup,
+ FakeBackupResource1.prototype.backup
+ );
+
+ let manifestPath = PathUtils.join(
+ stagingPathRenamed,
+ BackupService.MANIFEST_FILE_NAME
+ );
+
+ Assert.ok(await IOUtils.exists(manifestPath), "Manifest file exists");
+ let manifest = await IOUtils.readJSON(manifestPath);
+
+ let schema = await BackupService.MANIFEST_SCHEMA;
+ let validationResult = JsonSchemaValidator.validate(manifest, schema);
+ Assert.ok(validationResult.valid, "Schema matches manifest");
+ Assert.deepEqual(
+ Object.keys(manifest.resources).sort(),
+ ["fake1", "fake3"],
+ "Manifest contains all expected BackupResource keys"
+ );
+ Assert.deepEqual(
+ manifest.resources.fake1,
+ fake1ManifestEntry,
+ "Manifest contains the expected entry for FakeBackupResource1"
+ );
+ Assert.deepEqual(
+ manifest.resources.fake3,
+ fake3ManifestEntry,
+ "Manifest contains the expected entry for FakeBackupResource3"
+ );
+ Assert.equal(
+ manifest.meta.legacyClientID,
+ EXPECTED_CLIENT_ID,
+ "The client ID was stored properly."
+ );
+
+ taskFn(manifest);
+
+ // After createBackup is more fleshed out, we're going to want to make sure
+ // that we're writing the manifest file and that it contains the expected
+ // ManifestEntry objects, and that the staging folder was successfully
+ // renamed with the current date.
+ await IOUtils.remove(fakeProfilePath, { recursive: true });
+}
+
+/**
+ * Tests that calling BackupService.createBackup will call backup on each
+ * registered BackupResource, and that each BackupResource will have a folder
+ * created for them to write into. Tests in the signed-out state.
+ */
+add_task(async function test_createBackup_signed_out() {
+ let sandbox = sinon.createSandbox();
+
+ sandbox
+ .stub(UIState, "get")
+ .returns({ status: UIState.STATUS_NOT_CONFIGURED });
+ await testCreateBackupHelper(sandbox, manifest => {
+ Assert.equal(
+ manifest.meta.accountID,
+ undefined,
+ "Account ID should be undefined."
+ );
+ Assert.equal(
+ manifest.meta.accountEmail,
+ undefined,
+ "Account email should be undefined."
+ );
+ });
+
+ sandbox.restore();
+});
+
+/**
+ * Tests that calling BackupService.createBackup will call backup on each
+ * registered BackupResource, and that each BackupResource will have a folder
+ * created for them to write into. Tests in the signed-in state.
+ */
+add_task(async function test_createBackup_signed_in() {
+ let sandbox = sinon.createSandbox();
+
+ const TEST_UID = "ThisIsMyTestUID";
+ const TEST_EMAIL = "foxy@mozilla.org";
+
+ sandbox.stub(UIState, "get").returns({
+ status: UIState.STATUS_SIGNED_IN,
+ uid: TEST_UID,
+ email: TEST_EMAIL,
+ });
+
+ await testCreateBackupHelper(sandbox, manifest => {
+ Assert.equal(
+ manifest.meta.accountID,
+ TEST_UID,
+ "Account ID should be set properly."
+ );
+ Assert.equal(
+ manifest.meta.accountEmail,
+ TEST_EMAIL,
+ "Account email should be set properly."
+ );
+ });
+
+ sandbox.restore();
+});
+
+/**
+ * Creates a directory that looks a lot like a decompressed backup archive,
+ * and then tests that BackupService.recoverFromBackup can create a new profile
+ * and recover into it.
+ */
+add_task(async function test_recoverFromBackup() {
+ let sandbox = sinon.createSandbox();
+ let fakeEntryMap = new Map();
+ let backupResourceClasses = [
+ FakeBackupResource1,
+ FakeBackupResource2,
+ FakeBackupResource3,
+ ];
+
+ let i = 1;
+ for (let backupResourceClass of backupResourceClasses) {
+ let fakeManifestEntry = { [`fake${i}`]: `hello from backup - ${i}` };
+ sandbox
+ .stub(backupResourceClass.prototype, "backup")
+ .resolves(fakeManifestEntry);
+
+ let fakePostRecoveryEntry = { [`fake${i}`]: `hello from recover - ${i}` };
+ sandbox
+ .stub(backupResourceClass.prototype, "recover")
+ .resolves(fakePostRecoveryEntry);
+
+ fakeEntryMap.set(backupResourceClass, {
+ manifestEntry: fakeManifestEntry,
+ postRecoveryEntry: fakePostRecoveryEntry,
+ });
+
+ ++i;
+ }
+
+ let bs = new BackupService({
+ FakeBackupResource1,
+ FakeBackupResource2,
+ FakeBackupResource3,
+ });
+
+ let oldProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "recoverFromBackupTest"
+ );
+ let newProfileRootPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "recoverFromBackupTest-newProfileRoot"
+ );
+
+ let { stagingPath } = await bs.createBackup({ profilePath: oldProfilePath });
+
+ let testTelemetryStateObject = {
+ clientID: "ed209123-04a1-04a1-04a1-c0ffeec0ffee",
+ };
+ await IOUtils.writeJSON(
+ PathUtils.join(PathUtils.profileDir, "datareporting", "state.json"),
+ testTelemetryStateObject
+ );
+
+ let profile = await bs.recoverFromBackup(
+ stagingPath,
+ false /* shouldLaunch */,
+ newProfileRootPath
+ );
+ Assert.ok(profile, "An nsIToolkitProfile was created.");
+ let newProfilePath = profile.rootDir.path;
+
+ let postRecoveryFilePath = PathUtils.join(
+ newProfilePath,
+ "post-recovery.json"
+ );
+ let postRecovery = await IOUtils.readJSON(postRecoveryFilePath);
+
+ for (let backupResourceClass of backupResourceClasses) {
+ let expectedResourceFolder = PathUtils.join(
+ stagingPath,
+ backupResourceClass.key
+ );
+
+ let { manifestEntry, postRecoveryEntry } =
+ fakeEntryMap.get(backupResourceClass);
+
+ Assert.ok(
+ backupResourceClass.prototype.recover.calledOnce,
+ `Recover was called for ${backupResourceClass.key}`
+ );
+ Assert.ok(
+ backupResourceClass.prototype.recover.calledWith(
+ manifestEntry,
+ expectedResourceFolder,
+ newProfilePath
+ ),
+ `Recover was passed the right arguments for ${backupResourceClass.key}`
+ );
+ Assert.deepEqual(
+ postRecoveryEntry,
+ postRecovery[backupResourceClass.key],
+ "The post recovery data is as expected"
+ );
+ }
+
+ let newProfileTelemetryStateObject = await IOUtils.readJSON(
+ PathUtils.join(newProfileRootPath, "datareporting", "state.json")
+ );
+ Assert.deepEqual(
+ testTelemetryStateObject,
+ newProfileTelemetryStateObject,
+ "Recovered profile inherited telemetry state from the profile that " +
+ "initiated recovery"
+ );
+
+ await IOUtils.remove(oldProfilePath, { recursive: true });
+ await IOUtils.remove(newProfileRootPath, { recursive: true });
+ sandbox.restore();
+});
+
+/**
+ * Tests that if there's a post-recovery.json file in the profile directory
+ * when checkForPostRecovery() is called, that it is processed, and the
+ * postRecovery methods on the associated BackupResources are called with the
+ * entry values from the file.
+ */
+add_task(async function test_checkForPostRecovery() {
+ let sandbox = sinon.createSandbox();
+
+ let testProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "checkForPostRecoveryTest"
+ );
+ let fakePostRecoveryObject = {
+ [FakeBackupResource1.key]: "test 1",
+ [FakeBackupResource3.key]: "test 3",
+ };
+ await IOUtils.writeJSON(
+ PathUtils.join(testProfilePath, BackupService.POST_RECOVERY_FILE_NAME),
+ fakePostRecoveryObject
+ );
+
+ sandbox.stub(FakeBackupResource1.prototype, "postRecovery").resolves();
+ sandbox.stub(FakeBackupResource2.prototype, "postRecovery").resolves();
+ sandbox.stub(FakeBackupResource3.prototype, "postRecovery").resolves();
+
+ let bs = new BackupService({
+ FakeBackupResource1,
+ FakeBackupResource2,
+ FakeBackupResource3,
+ });
+
+ await bs.checkForPostRecovery(testProfilePath);
+ await bs.postRecoveryComplete;
+
+ Assert.ok(
+ FakeBackupResource1.prototype.postRecovery.calledOnce,
+ "FakeBackupResource1.postRecovery was called once"
+ );
+ Assert.ok(
+ FakeBackupResource2.prototype.postRecovery.notCalled,
+ "FakeBackupResource2.postRecovery was not called"
+ );
+ Assert.ok(
+ FakeBackupResource3.prototype.postRecovery.calledOnce,
+ "FakeBackupResource3.postRecovery was called once"
+ );
+ Assert.ok(
+ FakeBackupResource1.prototype.postRecovery.calledWith(
+ fakePostRecoveryObject[FakeBackupResource1.key]
+ ),
+ "FakeBackupResource1.postRecovery was called with the expected argument"
+ );
+ Assert.ok(
+ FakeBackupResource3.prototype.postRecovery.calledWith(
+ fakePostRecoveryObject[FakeBackupResource3.key]
+ ),
+ "FakeBackupResource3.postRecovery was called with the expected argument"
+ );
+
+ await IOUtils.remove(testProfilePath, { recursive: true });
+ sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js
new file mode 100644
index 0000000000..c73482dfe6
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js
@@ -0,0 +1,59 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+add_setup(() => {
+ // FOG needs to be initialized in order for data to flow.
+ Services.fog.initializeFOG();
+ Services.telemetry.clearScalars();
+});
+
+/**
+ * Tests that calling `BackupService.takeMeasurements` will call the measure
+ * method of all registered BackupResource classes.
+ */
+add_task(async function test_takeMeasurements() {
+ let sandbox = sinon.createSandbox();
+ sandbox.stub(FakeBackupResource1.prototype, "measure").resolves();
+ sandbox
+ .stub(FakeBackupResource2.prototype, "measure")
+ .rejects(new Error("Some failure to measure"));
+
+ let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 });
+ await bs.takeMeasurements();
+
+ for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) {
+ Assert.ok(
+ backupResourceClass.prototype.measure.calledOnce,
+ "Measure was called"
+ );
+ Assert.ok(
+ backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir),
+ "Measure was called with the profile directory argument"
+ );
+ }
+
+ sandbox.restore();
+});
+
+/**
+ * Tests that we can measure the disk space available in the profile directory.
+ */
+add_task(async function test_profDDiskSpace() {
+ let bs = new BackupService();
+ await bs.takeMeasurements();
+ let measurement = Glean.browserBackup.profDDiskSpace.testGetValue();
+ TelemetryTestUtils.assertScalar(
+ TelemetryTestUtils.getProcessScalars("parent", false, true),
+ "browser.backup.prof_d_disk_space",
+ measurement
+ );
+
+ Assert.greater(
+ measurement,
+ 0,
+ "Should have collected a measurement for the profile directory storage " +
+ "device"
+ );
+});
diff --git a/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js
new file mode 100644
index 0000000000..1690580437
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js
@@ -0,0 +1,142 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { CookiesBackupResource } = ChromeUtils.importESModule(
+ "resource:///modules/backup/CookiesBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Cookies db in a profile directory.
+ */
+add_task(async function test_measure() {
+ const EXPECTED_COOKIES_DB_SIZE = 1230;
+
+ Services.fog.testResetFOG();
+
+ // Create resource files in temporary directory
+ let tempDir = PathUtils.tempDir;
+ let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite");
+ await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE);
+
+ let cookiesBackupResource = new CookiesBackupResource();
+ await cookiesBackupResource.measure(tempDir);
+
+ let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue();
+ let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+ // Compare glean vs telemetry measurements
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.cookies_size",
+ cookiesMeasurement,
+ "Glean and telemetry measurements for cookies.sqlite should be equal"
+ );
+
+ // Compare glean measurements vs actual file sizes
+ Assert.equal(
+ cookiesMeasurement,
+ EXPECTED_COOKIES_DB_SIZE,
+ "Should have collected the correct glean measurement for cookies.sqlite"
+ );
+
+ await maybeRemovePath(tempCookiesDBPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+ let sandbox = sinon.createSandbox();
+
+ let cookiesBackupResource = new CookiesBackupResource();
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CookiesBackupResource-source-test"
+ );
+ let stagingPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CookiesBackupResource-staging-test"
+ );
+
+ // Make sure this file exists in the source directory, otherwise
+ // BackupResource will skip attempting to back it up.
+ await createTestFiles(sourcePath, [{ path: "cookies.sqlite" }]);
+
+ // We have no need to test that Sqlite.sys.mjs's backup method is working -
+ // this is something that is tested in Sqlite's own tests. We can just make
+ // sure that it's being called using sinon. Unfortunately, we cannot do the
+ // same thing with IOUtils.copy, as its methods are not stubbable.
+ let fakeConnection = {
+ backup: sandbox.stub().resolves(true),
+ close: sandbox.stub().resolves(true),
+ };
+ sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+ let manifestEntry = await cookiesBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "CookiesBackupResource.backup should return null as its ManifestEntry"
+ );
+
+ // Next, we'll make sure that the Sqlite connection had `backup` called on it
+ // with the right arguments.
+ Assert.ok(
+ fakeConnection.backup.calledOnce,
+ "Called backup the expected number of times for all connections"
+ );
+ Assert.ok(
+ fakeConnection.backup.calledWith(
+ PathUtils.join(stagingPath, "cookies.sqlite")
+ ),
+ "Called backup on the cookies.sqlite Sqlite connection"
+ );
+
+ await maybeRemovePath(stagingPath);
+ await maybeRemovePath(sourcePath);
+
+ sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let cookiesBackupResource = new CookiesBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CookiesBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CookiesBackupResource-test-profile"
+ );
+
+ const simpleCopyFiles = [{ path: "cookies.sqlite" }];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await cookiesBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "CookiesBackupResource.recover should return null as its post " +
+ "recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js
new file mode 100644
index 0000000000..f53fec8d3f
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js
@@ -0,0 +1,215 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule(
+ "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure credentials related files in the profile directory.
+ */
+add_task(async function test_measure() {
+ Services.fog.testResetFOG();
+
+ const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413;
+ const EXPECTED_SECURITY_KILOBYTES_SIZE = 231;
+
+ // Create resource files in temporary directory
+ const tempDir = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CredentialsAndSecurityBackupResource-measurement-test"
+ );
+
+ const mockFiles = [
+ // Set up credentials files
+ { path: "key4.db", sizeInKB: 300 },
+ { path: "logins.json", sizeInKB: 1 },
+ { path: "logins-backup.json", sizeInKB: 1 },
+ { path: "autofill-profiles.json", sizeInKB: 1 },
+ { path: "credentialstate.sqlite", sizeInKB: 100 },
+ { path: "signedInUser.json", sizeInKB: 5 },
+ // Set up security files
+ { path: "cert9.db", sizeInKB: 230 },
+ { path: "pkcs11.txt", sizeInKB: 1 },
+ ];
+
+ await createTestFiles(tempDir, mockFiles);
+
+ let credentialsAndSecurityBackupResource =
+ new CredentialsAndSecurityBackupResource();
+ await credentialsAndSecurityBackupResource.measure(tempDir);
+
+ let credentialsMeasurement =
+ Glean.browserBackup.credentialsDataSize.testGetValue();
+ let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue();
+ let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+ // Credentials measurements
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.credentials_data_size",
+ credentialsMeasurement,
+ "Glean and telemetry measurements for credentials data should be equal"
+ );
+
+ Assert.equal(
+ credentialsMeasurement,
+ EXPECTED_CREDENTIALS_KILOBYTES_SIZE,
+ "Should have collected the correct glean measurement for credentials files"
+ );
+
+ // Security measurements
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.security_data_size",
+ securityMeasurement,
+ "Glean and telemetry measurements for security data should be equal"
+ );
+ Assert.equal(
+ securityMeasurement,
+ EXPECTED_SECURITY_KILOBYTES_SIZE,
+ "Should have collected the correct glean measurement for security files"
+ );
+
+ // Cleanup
+ await maybeRemovePath(tempDir);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+ let sandbox = sinon.createSandbox();
+
+ let credentialsAndSecurityBackupResource =
+ new CredentialsAndSecurityBackupResource();
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CredentialsAndSecurityBackupResource-source-test"
+ );
+ let stagingPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CredentialsAndSecurityBackupResource-staging-test"
+ );
+
+ const simpleCopyFiles = [
+ { path: "logins.json", sizeInKB: 1 },
+ { path: "logins-backup.json", sizeInKB: 1 },
+ { path: "autofill-profiles.json", sizeInKB: 1 },
+ { path: "signedInUser.json", sizeInKB: 5 },
+ { path: "pkcs11.txt", sizeInKB: 1 },
+ ];
+ await createTestFiles(sourcePath, simpleCopyFiles);
+
+  // Create our fake database files. We don't expect these to be copied to the
+  // staging directory in this test due to our stubbing of the backup method, so
+  // we don't include them in `simpleCopyFiles`.
+ await createTestFiles(sourcePath, [
+ { path: "cert9.db" },
+ { path: "key4.db" },
+ { path: "credentialstate.sqlite" },
+ ]);
+
+ // We have no need to test that Sqlite.sys.mjs's backup method is working -
+ // this is something that is tested in Sqlite's own tests. We can just make
+ // sure that it's being called using sinon. Unfortunately, we cannot do the
+ // same thing with IOUtils.copy, as its methods are not stubbable.
+ let fakeConnection = {
+ backup: sandbox.stub().resolves(true),
+ close: sandbox.stub().resolves(true),
+ };
+ sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+ let manifestEntry = await credentialsAndSecurityBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+
+ Assert.equal(
+ manifestEntry,
+ null,
+ "CredentialsAndSecurityBackupResource.backup should return null as its ManifestEntry"
+ );
+
+ await assertFilesExist(stagingPath, simpleCopyFiles);
+
+ // Next, we'll make sure that the Sqlite connection had `backup` called on it
+ // with the right arguments.
+ Assert.ok(
+ fakeConnection.backup.calledThrice,
+ "Called backup the expected number of times for all connections"
+ );
+ Assert.ok(
+ fakeConnection.backup.firstCall.calledWith(
+ PathUtils.join(stagingPath, "cert9.db")
+ ),
+ "Called backup on cert9.db connection first"
+ );
+ Assert.ok(
+ fakeConnection.backup.secondCall.calledWith(
+ PathUtils.join(stagingPath, "key4.db")
+ ),
+ "Called backup on key4.db connection second"
+ );
+ Assert.ok(
+ fakeConnection.backup.thirdCall.calledWith(
+ PathUtils.join(stagingPath, "credentialstate.sqlite")
+ ),
+ "Called backup on credentialstate.sqlite connection third"
+ );
+
+ await maybeRemovePath(stagingPath);
+ await maybeRemovePath(sourcePath);
+
+ sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let credentialsAndSecurityBackupResource =
+ new CredentialsAndSecurityBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CredentialsAndSecurityBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "CredentialsAndSecurityBackupResource-test-profile"
+ );
+
+ const files = [
+ { path: "logins.json" },
+ { path: "logins-backup.json" },
+ { path: "autofill-profiles.json" },
+ { path: "credentialstate.sqlite" },
+ { path: "signedInUser.json" },
+ { path: "cert9.db" },
+ { path: "key4.db" },
+ { path: "pkcs11.txt" },
+ ];
+ await createTestFiles(recoveryPath, files);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await credentialsAndSecurityBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "CredentialsAndSecurityBackupResource.recover should return null as its post " +
+ "recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, files);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js
new file mode 100644
index 0000000000..93434daa9c
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js
@@ -0,0 +1,146 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { FormHistoryBackupResource } = ChromeUtils.importESModule(
+ "resource:///modules/backup/FormHistoryBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Form History db in a profile directory.
+ */
+add_task(async function test_measure() {
+ const EXPECTED_FORM_HISTORY_DB_SIZE = 500;
+
+ Services.fog.testResetFOG();
+
+ // Create resource files in temporary directory
+ let tempDir = PathUtils.tempDir;
+ let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite");
+ await createKilobyteSizedFile(
+ tempFormHistoryDBPath,
+ EXPECTED_FORM_HISTORY_DB_SIZE
+ );
+
+ let formHistoryBackupResource = new FormHistoryBackupResource();
+ await formHistoryBackupResource.measure(tempDir);
+
+ let formHistoryMeasurement =
+ Glean.browserBackup.formHistorySize.testGetValue();
+ let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+ // Compare glean vs telemetry measurements
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.form_history_size",
+ formHistoryMeasurement,
+ "Glean and telemetry measurements for formhistory.sqlite should be equal"
+ );
+
+ // Compare glean measurements vs actual file sizes
+ Assert.equal(
+ formHistoryMeasurement,
+ EXPECTED_FORM_HISTORY_DB_SIZE,
+ "Should have collected the correct glean measurement for formhistory.sqlite"
+ );
+
+ await IOUtils.remove(tempFormHistoryDBPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+ let sandbox = sinon.createSandbox();
+
+ let formHistoryBackupResource = new FormHistoryBackupResource();
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "FormHistoryBackupResource-source-test"
+ );
+ let stagingPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "FormHistoryBackupResource-staging-test"
+ );
+
+ // Make sure this file exists in the source directory, otherwise
+ // BackupResource will skip attempting to back it up.
+ await createTestFiles(sourcePath, [{ path: "formhistory.sqlite" }]);
+
+ // We have no need to test that Sqlite.sys.mjs's backup method is working -
+ // this is something that is tested in Sqlite's own tests. We can just make
+ // sure that it's being called using sinon. Unfortunately, we cannot do the
+ // same thing with IOUtils.copy, as its methods are not stubbable.
+ let fakeConnection = {
+ backup: sandbox.stub().resolves(true),
+ close: sandbox.stub().resolves(true),
+ };
+ sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
+
+ let manifestEntry = await formHistoryBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "FormHistoryBackupResource.backup should return null as its ManifestEntry"
+ );
+
+ // Next, we'll make sure that the Sqlite connection had `backup` called on it
+ // with the right arguments.
+ Assert.ok(
+ fakeConnection.backup.calledOnce,
+ "Called backup the expected number of times for all connections"
+ );
+ Assert.ok(
+ fakeConnection.backup.calledWith(
+ PathUtils.join(stagingPath, "formhistory.sqlite")
+ ),
+ "Called backup on the formhistory.sqlite Sqlite connection"
+ );
+
+ await maybeRemovePath(stagingPath);
+ await maybeRemovePath(sourcePath);
+
+ sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let formHistoryBackupResource = new FormHistoryBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "FormHistoryBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "FormHistoryBackupResource-test-profile"
+ );
+
+ const simpleCopyFiles = [{ path: "formhistory.sqlite" }];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await formHistoryBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "FormHistoryBackupResource.recover should return null as its post " +
+ "recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
index e57dd50cd3..ab63b65332 100644
--- a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js
@@ -7,20 +7,27 @@ const { MiscDataBackupResource } = ChromeUtils.importESModule(
"resource:///modules/backup/MiscDataBackupResource.sys.mjs"
);
+const { ActivityStreamStorage } = ChromeUtils.importESModule(
+ "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs"
+);
+
+const { ProfileAge } = ChromeUtils.importESModule(
+ "resource://gre/modules/ProfileAge.sys.mjs"
+);
+
/**
* Tests that we can measure miscellaneous files in the profile directory.
*/
add_task(async function test_measure() {
Services.fog.testResetFOG();
- const EXPECTED_MISC_KILOBYTES_SIZE = 241;
+ const EXPECTED_MISC_KILOBYTES_SIZE = 231;
const tempDir = await IOUtils.createUniqueDirectory(
PathUtils.tempDir,
"MiscDataBackupResource-measurement-test"
);
const mockFiles = [
- { path: "times.json", sizeInKB: 5 },
{ path: "enumerate_devices.txt", sizeInKB: 1 },
{ path: "protections.sqlite", sizeInKB: 100 },
{ path: "SiteSecurityServiceState.bin", sizeInKB: 10 },
@@ -69,12 +76,16 @@ add_task(async function test_backup() {
);
const simpleCopyFiles = [
- { path: "times.json" },
{ path: "enumerate_devices.txt" },
{ path: "SiteSecurityServiceState.bin" },
];
await createTestFiles(sourcePath, simpleCopyFiles);
+  // Create our fake database files. We don't expect these to be copied to the
+  // staging directory in this test due to our stubbing of the backup method, so
+  // we don't include them in `simpleCopyFiles`.
+ await createTestFiles(sourcePath, [{ path: "protections.sqlite" }]);
+
// We have no need to test that Sqlite.sys.mjs's backup method is working -
// this is something that is tested in Sqlite's own tests. We can just make
// sure that it's being called using sinon. Unfortunately, we cannot do the
@@ -85,7 +96,27 @@ add_task(async function test_backup() {
};
sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
- await miscDataBackupResource.backup(stagingPath, sourcePath);
+ let snippetsTableStub = {
+ getAllKeys: sandbox.stub().resolves(["key1", "key2"]),
+ get: sandbox.stub().callsFake(key => {
+ return { key: `value for ${key}` };
+ }),
+ };
+
+ sandbox
+ .stub(ActivityStreamStorage.prototype, "getDbTable")
+ .withArgs("snippets")
+ .resolves(snippetsTableStub);
+
+ let manifestEntry = await miscDataBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "MiscDataBackupResource.backup should return null as its ManifestEntry"
+ );
await assertFilesExist(stagingPath, simpleCopyFiles);
@@ -102,12 +133,170 @@ add_task(async function test_backup() {
"Called backup on the protections.sqlite Sqlite connection"
);
- // Bug 1890585 - we don't currently have the ability to copy the
- // chrome-privileged IndexedDB databases under storage/permanent/chrome, so
- // we'll just skip testing that for now.
+ // Bug 1890585 - we don't currently have the generalized ability to copy the
+ // chrome-privileged IndexedDB databases under storage/permanent/chrome, but
+ // we do support copying individual IndexedDB databases by manually exporting
+ // and re-importing their contents.
+ let snippetsBackupPath = PathUtils.join(
+ stagingPath,
+ "activity-stream-snippets.json"
+ );
+ Assert.ok(
+ await IOUtils.exists(snippetsBackupPath),
+ "The activity-stream-snippets.json file should exist"
+ );
+ let snippetsBackupContents = await IOUtils.readJSON(snippetsBackupPath);
+ Assert.deepEqual(
+ snippetsBackupContents,
+ {
+ key1: { key: "value for key1" },
+ key2: { key: "value for key2" },
+ },
+ "The contents of the activity-stream-snippets.json file should be as expected"
+ );
await maybeRemovePath(stagingPath);
await maybeRemovePath(sourcePath);
sandbox.restore();
});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let miscBackupResource = new MiscDataBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "MiscDataBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "MiscDataBackupResource-test-profile"
+ );
+
+ // Write a dummy times.json into the xpcshell test profile directory. We
+ // expect it to be copied into the destination profile.
+ let originalProfileAge = await ProfileAge(PathUtils.profileDir);
+ await originalProfileAge.computeAndPersistCreated();
+ Assert.ok(
+ await IOUtils.exists(PathUtils.join(PathUtils.profileDir, "times.json"))
+ );
+
+ const simpleCopyFiles = [
+ { path: "enumerate_devices.txt" },
+ { path: "protections.sqlite" },
+ { path: "SiteSecurityServiceState.bin" },
+ ];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ const SNIPPETS_BACKUP_FILE = "activity-stream-snippets.json";
+
+ // We'll also separately create the activity-stream-snippets.json file, which
+ // is not expected to be copied into the profile directory, but is expected
+ // to exist in the recovery path.
+ await createTestFiles(recoveryPath, [{ path: SNIPPETS_BACKUP_FILE }]);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await miscBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.deepEqual(
+ postRecoveryEntry,
+ {
+ snippetsBackupFile: PathUtils.join(recoveryPath, SNIPPETS_BACKUP_FILE),
+ },
+ "MiscDataBackupResource.recover should return the snippets backup data " +
+ "path as its post recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+ // The activity-stream-snippets.json path should _not_ have been written to
+ // the profile path.
+ Assert.ok(
+ !(await IOUtils.exists(
+ PathUtils.join(destProfilePath, SNIPPETS_BACKUP_FILE)
+ )),
+ "Snippets backup data should not have gone into the profile directory"
+ );
+
+ // The times.json file should have been copied over and a backup recovery
+ // time written into it.
+ Assert.ok(
+ await IOUtils.exists(PathUtils.join(destProfilePath, "times.json"))
+ );
+ let copiedProfileAge = await ProfileAge(destProfilePath);
+ Assert.equal(
+ await originalProfileAge.created,
+ await copiedProfileAge.created,
+ "Created timestamp should match."
+ );
+ Assert.equal(
+ await originalProfileAge.firstUse,
+ await copiedProfileAge.firstUse,
+ "First use timestamp should match."
+ );
+ Assert.ok(
+ await copiedProfileAge.recoveredFromBackup,
+ "Backup recovery timestamp should have been set."
+ );
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
+
+/**
+ * Test that the postRecovery method correctly writes the snippets backup data
+ * into the snippets IndexedDB table.
+ */
+add_task(async function test_postRecovery() {
+ let sandbox = sinon.createSandbox();
+
+ let fakeProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "MiscDataBackupResource-test-profile"
+ );
+ let fakeSnippetsData = {
+ key1: "value1",
+ key2: "value2",
+ };
+ const SNIPPEST_BACKUP_FILE = PathUtils.join(
+ fakeProfilePath,
+ "activity-stream-snippets.json"
+ );
+
+ await IOUtils.writeJSON(SNIPPEST_BACKUP_FILE, fakeSnippetsData);
+
+ let snippetsTableStub = {
+ set: sandbox.stub(),
+ };
+
+ sandbox
+ .stub(ActivityStreamStorage.prototype, "getDbTable")
+ .withArgs("snippets")
+ .resolves(snippetsTableStub);
+
+ let miscBackupResource = new MiscDataBackupResource();
+ await miscBackupResource.postRecovery({
+ snippetsBackupFile: SNIPPEST_BACKUP_FILE,
+ });
+
+ Assert.ok(
+ snippetsTableStub.set.calledTwice,
+ "The snippets table's set method was called twice"
+ );
+ Assert.ok(
+ snippetsTableStub.set.firstCall.calledWith("key1", "value1"),
+ "The snippets table's set method was called with the first key-value pair"
+ );
+ Assert.ok(
+ snippetsTableStub.set.secondCall.calledWith("key2", "value2"),
+ "The snippets table's set method was called with the second key-value pair"
+ );
+
+ sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
index de97281372..7248a5c614 100644
--- a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js
@@ -3,6 +3,9 @@ https://creativecommons.org/publicdomain/zero/1.0/ */
"use strict";
+const { BookmarkJSONUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/BookmarkJSONUtils.sys.mjs"
+);
const { PlacesBackupResource } = ChromeUtils.importESModule(
"resource:///modules/backup/PlacesBackupResource.sys.mjs"
);
@@ -93,13 +96,28 @@ add_task(async function test_backup() {
"PlacesBackupResource-staging-test"
);
+ // Make sure these files exist in the source directory, otherwise
+ // BackupResource will skip attempting to back them up.
+ await createTestFiles(sourcePath, [
+ { path: "places.sqlite" },
+ { path: "favicons.sqlite" },
+ ]);
+
let fakeConnection = {
backup: sandbox.stub().resolves(true),
close: sandbox.stub().resolves(true),
};
sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
- await placesBackupResource.backup(stagingPath, sourcePath);
+ let manifestEntry = await placesBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "PlacesBackupResource.backup should return null as its ManifestEntry"
+ );
Assert.ok(
fakeConnection.backup.calledTwice,
@@ -154,7 +172,16 @@ add_task(async function test_backup_no_saved_history() {
Services.prefs.setBoolPref(HISTORY_ENABLED_PREF, false);
Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, false);
- await placesBackupResource.backup(stagingPath, sourcePath);
+ let manifestEntry = await placesBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.deepEqual(
+ manifestEntry,
+ { bookmarksOnly: true },
+ "Should have gotten back a ManifestEntry indicating that we only copied " +
+ "bookmarks"
+ );
Assert.ok(
fakeConnection.backup.notCalled,
@@ -171,7 +198,13 @@ add_task(async function test_backup_no_saved_history() {
Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, true);
fakeConnection.backup.resetHistory();
- await placesBackupResource.backup(stagingPath, sourcePath);
+ manifestEntry = await placesBackupResource.backup(stagingPath, sourcePath);
+ Assert.deepEqual(
+ manifestEntry,
+ { bookmarksOnly: true },
+ "Should have gotten back a ManifestEntry indicating that we only copied " +
+ "bookmarks"
+ );
Assert.ok(
fakeConnection.backup.notCalled,
@@ -211,7 +244,16 @@ add_task(async function test_backup_private_browsing() {
sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
sandbox.stub(PrivateBrowsingUtils, "permanentPrivateBrowsing").value(true);
- await placesBackupResource.backup(stagingPath, sourcePath);
+ let manifestEntry = await placesBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.deepEqual(
+ manifestEntry,
+ { bookmarksOnly: true },
+ "Should have gotten back a ManifestEntry indicating that we only copied " +
+ "bookmarks"
+ );
Assert.ok(
fakeConnection.backup.notCalled,
@@ -224,3 +266,104 @@ add_task(async function test_backup_private_browsing() {
sandbox.restore();
});
+
+/**
+ * Test that the recover method correctly copies places.sqlite and favicons.sqlite
+ * from the recovery directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let placesBackupResource = new PlacesBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PlacesBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PlacesBackupResource-test-profile"
+ );
+
+ const simpleCopyFiles = [
+ { path: "places.sqlite" },
+ { path: "favicons.sqlite" },
+ ];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await placesBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "PlacesBackupResource.recover should return null as its post recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
+
+/**
+ * Test that the recover method correctly copies bookmarks.jsonlz4 from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover_bookmarks_only() {
+ let sandbox = sinon.createSandbox();
+ let placesBackupResource = new PlacesBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PlacesBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PlacesBackupResource-test-profile"
+ );
+ let bookmarksImportStub = sandbox
+ .stub(BookmarkJSONUtils, "importFromFile")
+ .resolves(true);
+
+ await createTestFiles(recoveryPath, [{ path: "bookmarks.jsonlz4" }]);
+
+  // The recover method is expected to detect a bookmarks-only import
+ let postRecoveryEntry = await placesBackupResource.recover(
+ { bookmarksOnly: true },
+ recoveryPath,
+ destProfilePath
+ );
+
+ let expectedBookmarksPath = PathUtils.join(recoveryPath, "bookmarks.jsonlz4");
+
+ // Expect the bookmarks backup file path to be passed from recover()
+ Assert.deepEqual(
+ postRecoveryEntry,
+ { bookmarksBackupPath: expectedBookmarksPath },
+ "PlacesBackupResource.recover should return the expected post recovery entry"
+ );
+
+ // Ensure that files stored in a places backup are not copied to the new profile during recovery
+ for (let placesFile of [
+ "places.sqlite",
+ "favicons.sqlite",
+ "bookmarks.jsonlz4",
+ ]) {
+ Assert.ok(
+ !(await IOUtils.exists(PathUtils.join(destProfilePath, placesFile))),
+ `${placesFile} should not exist in the new profile`
+ );
+ }
+
+ // Now pretend that BackupService called the postRecovery method
+ await placesBackupResource.postRecovery(postRecoveryEntry);
+ Assert.ok(
+ bookmarksImportStub.calledOnce,
+ "BookmarkJSONUtils.importFromFile was called in the postRecovery step"
+ );
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+
+ sandbox.restore();
+});
diff --git a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
index 6845431bb8..2075b57e91 100644
--- a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
+++ b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js
@@ -86,6 +86,14 @@ add_task(async function test_backup() {
];
await createTestFiles(sourcePath, simpleCopyFiles);
+ // Create our fake database files. We don't expect these to be copied to the
+ // staging directory in this test due to our stubbing of the backup method, so
+ // we don't include it in `simpleCopyFiles`.
+ await createTestFiles(sourcePath, [
+ { path: "permissions.sqlite" },
+ { path: "content-prefs.sqlite" },
+ ]);
+
// We have no need to test that Sqlite.sys.mjs's backup method is working -
// this is something that is tested in Sqlite's own tests. We can just make
// sure that it's being called using sinon. Unfortunately, we cannot do the
@@ -96,7 +104,15 @@ add_task(async function test_backup() {
};
sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);
- await preferencesBackupResource.backup(stagingPath, sourcePath);
+ let manifestEntry = await preferencesBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "PreferencesBackupResource.backup should return null as its ManifestEntry"
+ );
await assertFilesExist(stagingPath, simpleCopyFiles);
@@ -130,3 +146,51 @@ add_task(async function test_backup() {
sandbox.restore();
});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let preferencesBackupResource = new PreferencesBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PreferencesBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "PreferencesBackupResource-test-profile"
+ );
+
+ const simpleCopyFiles = [
+ { path: "prefs.js" },
+ { path: "xulstore.json" },
+ { path: "permissions.sqlite" },
+ { path: "content-prefs.sqlite" },
+ { path: "containers.json" },
+ { path: "handlers.json" },
+ { path: "search.json.mozlz4" },
+ { path: "user.js" },
+ { path: ["chrome", "userChrome.css"] },
+ { path: ["chrome", "userContent.css"] },
+ { path: ["chrome", "childFolder", "someOtherStylesheet.css"] },
+ ];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await preferencesBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "PreferencesBackupResource.recover should return null as its post recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, simpleCopyFiles);
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js
new file mode 100644
index 0000000000..d57f2d3a25
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js
@@ -0,0 +1,209 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { SessionStoreBackupResource } = ChromeUtils.importESModule(
+ "resource:///modules/backup/SessionStoreBackupResource.sys.mjs"
+);
+const { SessionStore } = ChromeUtils.importESModule(
+ "resource:///modules/sessionstore/SessionStore.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the Session Store JSON and backups directory.
+ */
+add_task(async function test_measure() {
+ const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000;
+ Services.fog.testResetFOG();
+
+ // Create the sessionstore-backups directory.
+ let tempDir = PathUtils.tempDir;
+ let sessionStoreBackupsPath = PathUtils.join(
+ tempDir,
+ "sessionstore-backups",
+ "restore.jsonlz4"
+ );
+ await createKilobyteSizedFile(
+ sessionStoreBackupsPath,
+ EXPECTED_KILOBYTES_FOR_BACKUPS_DIR
+ );
+
+ let sessionStoreBackupResource = new SessionStoreBackupResource();
+ await sessionStoreBackupResource.measure(tempDir);
+
+ let sessionStoreBackupsDirectoryMeasurement =
+ Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue();
+ let sessionStoreMeasurement =
+ Glean.browserBackup.sessionStoreSize.testGetValue();
+ let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
+
+ // Compare glean vs telemetry measurements
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.session_store_backups_directory_size",
+ sessionStoreBackupsDirectoryMeasurement,
+ "Glean and telemetry measurements for session store backups directory should be equal"
+ );
+ TelemetryTestUtils.assertScalar(
+ scalars,
+ "browser.backup.session_store_size",
+ sessionStoreMeasurement,
+ "Glean and telemetry measurements for session store should be equal"
+ );
+
+ // Compare glean measurements vs actual file sizes
+ Assert.equal(
+ sessionStoreBackupsDirectoryMeasurement,
+ EXPECTED_KILOBYTES_FOR_BACKUPS_DIR,
+ "Should have collected the correct glean measurement for the sessionstore-backups directory"
+ );
+
+ // Session store measurement is from `getCurrentState`, so exact size is unknown.
+ Assert.greater(
+ sessionStoreMeasurement,
+ 0,
+ "Should have collected a measurement for the session store"
+ );
+
+ await IOUtils.remove(sessionStoreBackupsPath);
+});
+
+/**
+ * Test that the backup method correctly copies items from the profile directory
+ * into the staging directory.
+ */
+add_task(async function test_backup() {
+ let sandbox = sinon.createSandbox();
+
+ let sessionStoreBackupResource = new SessionStoreBackupResource();
+ let sourcePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "SessionStoreBackupResource-source-test"
+ );
+ let stagingPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "SessionStoreBackupResource-staging-test"
+ );
+
+ const simpleCopyFiles = [
+ { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] },
+ { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] },
+ ];
+ await createTestFiles(sourcePath, simpleCopyFiles);
+
+ let sessionStoreState = SessionStore.getCurrentState(true);
+ let manifestEntry = await sessionStoreBackupResource.backup(
+ stagingPath,
+ sourcePath
+ );
+ Assert.equal(
+ manifestEntry,
+ null,
+ "SessionStoreBackupResource.backup should return null as its ManifestEntry"
+ );
+
+ /**
+ * We don't expect the actual file sessionstore.jsonlz4 to exist in the profile directory before calling the backup method.
+ * Instead, verify that it is created by the backup method and exists in the staging folder right after.
+ */
+ await assertFilesExist(stagingPath, [
+ ...simpleCopyFiles,
+ { path: "sessionstore.jsonlz4" },
+ ]);
+
+ /**
+ * Do a deep comparison between the recorded session state before backup and the file made in the staging folder
+ * to verify that information about session state was correctly written for backup.
+ */
+ let sessionStoreStateStaged = await IOUtils.readJSON(
+ PathUtils.join(stagingPath, "sessionstore.jsonlz4"),
+ { decompress: true }
+ );
+
+ /**
+ * These timestamps might be slightly different from one another, so we'll exclude
+ * them from the comparison.
+ */
+ delete sessionStoreStateStaged.session.lastUpdate;
+ delete sessionStoreState.session.lastUpdate;
+ Assert.deepEqual(
+ sessionStoreStateStaged,
+ sessionStoreState,
+ "sessionstore.jsonlz4 in the staging folder matches the recorded session state"
+ );
+
+ await maybeRemovePath(stagingPath);
+ await maybeRemovePath(sourcePath);
+
+ sandbox.restore();
+});
+
+/**
+ * Test that the recover method correctly copies items from the recovery
+ * directory into the destination profile directory.
+ */
+add_task(async function test_recover() {
+ let sessionStoreBackupResource = new SessionStoreBackupResource();
+ let recoveryPath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "SessionStoreBackupResource-recovery-test"
+ );
+ let destProfilePath = await IOUtils.createUniqueDirectory(
+ PathUtils.tempDir,
+ "SessionStoreBackupResource-test-profile"
+ );
+
+ const simpleCopyFiles = [
+ { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] },
+ { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] },
+ ];
+ await createTestFiles(recoveryPath, simpleCopyFiles);
+
+ // We backup a copy of sessionstore.jsonlz4, so ensure it exists in the recovery path
+ let sessionStoreState = SessionStore.getCurrentState(true);
+ let sessionStoreBackupPath = PathUtils.join(
+ recoveryPath,
+ "sessionstore.jsonlz4"
+ );
+ await IOUtils.writeJSON(sessionStoreBackupPath, sessionStoreState, {
+ compress: true,
+ });
+
+ // The backup method is expected to have returned a null ManifestEntry
+ let postRecoveryEntry = await sessionStoreBackupResource.recover(
+ null /* manifestEntry */,
+ recoveryPath,
+ destProfilePath
+ );
+ Assert.equal(
+ postRecoveryEntry,
+ null,
+ "SessionStoreBackupResource.recover should return null as its post recovery entry"
+ );
+
+ await assertFilesExist(destProfilePath, [
+ ...simpleCopyFiles,
+ { path: "sessionstore.jsonlz4" },
+ ]);
+
+ let sessionStateCopied = await IOUtils.readJSON(
+ PathUtils.join(destProfilePath, "sessionstore.jsonlz4"),
+ { decompress: true }
+ );
+
+ /**
+ * These timestamps might be slightly different from one another, so we'll exclude
+ * them from the comparison.
+ */
+ delete sessionStateCopied.session.lastUpdate;
+ delete sessionStoreState.session.lastUpdate;
+ Assert.deepEqual(
+ sessionStateCopied,
+ sessionStoreState,
+ "sessionstore.jsonlz4 in the destination profile folder matches the backed up session state"
+ );
+
+ await maybeRemovePath(recoveryPath);
+ await maybeRemovePath(destProfilePath);
+});
diff --git a/browser/components/backup/tests/xpcshell/test_createBackup.js b/browser/components/backup/tests/xpcshell/test_createBackup.js
deleted file mode 100644
index fcace695ef..0000000000
--- a/browser/components/backup/tests/xpcshell/test_createBackup.js
+++ /dev/null
@@ -1,74 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
-https://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-/**
- * Tests that calling BackupService.createBackup will call backup on each
- * registered BackupResource, and that each BackupResource will have a folder
- * created for them to write into.
- */
-add_task(async function test_createBackup() {
- let sandbox = sinon.createSandbox();
- sandbox
- .stub(FakeBackupResource1.prototype, "backup")
- .resolves({ fake1: "hello from 1" });
- sandbox
- .stub(FakeBackupResource2.prototype, "backup")
- .rejects(new Error("Some failure to backup"));
- sandbox
- .stub(FakeBackupResource3.prototype, "backup")
- .resolves({ fake3: "hello from 3" });
-
- let bs = new BackupService({
- FakeBackupResource1,
- FakeBackupResource2,
- FakeBackupResource3,
- });
-
- let fakeProfilePath = await IOUtils.createUniqueDirectory(
- PathUtils.tempDir,
- "createBackupTest"
- );
-
- await bs.createBackup({ profilePath: fakeProfilePath });
-
- // For now, we expect a staging folder to exist under the fakeProfilePath,
- // and we should find a folder for each fake BackupResource.
- let stagingPath = PathUtils.join(fakeProfilePath, "backups", "staging");
- Assert.ok(await IOUtils.exists(stagingPath), "Staging folder exists");
-
- for (let backupResourceClass of [
- FakeBackupResource1,
- FakeBackupResource2,
- FakeBackupResource3,
- ]) {
- let expectedResourceFolder = PathUtils.join(
- stagingPath,
- backupResourceClass.key
- );
- Assert.ok(
- await IOUtils.exists(expectedResourceFolder),
- `BackupResource staging folder exists for ${backupResourceClass.key}`
- );
- Assert.ok(
- backupResourceClass.prototype.backup.calledOnce,
- `Backup was called for ${backupResourceClass.key}`
- );
- Assert.ok(
- backupResourceClass.prototype.backup.calledWith(
- expectedResourceFolder,
- fakeProfilePath
- ),
- `Backup was passed the right paths for ${backupResourceClass.key}`
- );
- }
-
- // After createBackup is more fleshed out, we're going to want to make sure
- // that we're writing the manifest file and that it contains the expected
- // ManifestEntry objects, and that the staging folder was successfully
- // renamed with the current date.
- await IOUtils.remove(fakeProfilePath, { recursive: true });
-
- sandbox.restore();
-});
diff --git a/browser/components/backup/tests/xpcshell/test_measurements.js b/browser/components/backup/tests/xpcshell/test_measurements.js
deleted file mode 100644
index 0dece6b370..0000000000
--- a/browser/components/backup/tests/xpcshell/test_measurements.js
+++ /dev/null
@@ -1,577 +0,0 @@
-/* Any copyright is dedicated to the Public Domain.
-http://creativecommons.org/publicdomain/zero/1.0/ */
-
-"use strict";
-
-const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule(
- "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs"
-);
-const { AddonsBackupResource } = ChromeUtils.importESModule(
- "resource:///modules/backup/AddonsBackupResource.sys.mjs"
-);
-const { CookiesBackupResource } = ChromeUtils.importESModule(
- "resource:///modules/backup/CookiesBackupResource.sys.mjs"
-);
-
-const { FormHistoryBackupResource } = ChromeUtils.importESModule(
- "resource:///modules/backup/FormHistoryBackupResource.sys.mjs"
-);
-
-const { SessionStoreBackupResource } = ChromeUtils.importESModule(
- "resource:///modules/backup/SessionStoreBackupResource.sys.mjs"
-);
-
-add_setup(() => {
- // FOG needs to be initialized in order for data to flow.
- Services.fog.initializeFOG();
- Services.telemetry.clearScalars();
-});
-
-/**
- * Tests that calling `BackupService.takeMeasurements` will call the measure
- * method of all registered BackupResource classes.
- */
-add_task(async function test_takeMeasurements() {
- let sandbox = sinon.createSandbox();
- sandbox.stub(FakeBackupResource1.prototype, "measure").resolves();
- sandbox
- .stub(FakeBackupResource2.prototype, "measure")
- .rejects(new Error("Some failure to measure"));
-
- let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 });
- await bs.takeMeasurements();
-
- for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) {
- Assert.ok(
- backupResourceClass.prototype.measure.calledOnce,
- "Measure was called"
- );
- Assert.ok(
- backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir),
- "Measure was called with the profile directory argument"
- );
- }
-
- sandbox.restore();
-});
-
-/**
- * Tests that we can measure the disk space available in the profile directory.
- */
-add_task(async function test_profDDiskSpace() {
- let bs = new BackupService();
- await bs.takeMeasurements();
- let measurement = Glean.browserBackup.profDDiskSpace.testGetValue();
- TelemetryTestUtils.assertScalar(
- TelemetryTestUtils.getProcessScalars("parent", false, true),
- "browser.backup.prof_d_disk_space",
- measurement
- );
-
- Assert.greater(
- measurement,
- 0,
- "Should have collected a measurement for the profile directory storage " +
- "device"
- );
-});
-
-/**
- * Tests that we can measure credentials related files in the profile directory.
- */
-add_task(async function test_credentialsAndSecurityBackupResource() {
- Services.fog.testResetFOG();
-
- const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413;
- const EXPECTED_SECURITY_KILOBYTES_SIZE = 231;
-
- // Create resource files in temporary directory
- const tempDir = await IOUtils.createUniqueDirectory(
- PathUtils.tempDir,
- "CredentialsAndSecurityBackupResource-measurement-test"
- );
-
- const mockFiles = [
- // Set up credentials files
- { path: "key4.db", sizeInKB: 300 },
- { path: "logins.json", sizeInKB: 1 },
- { path: "logins-backup.json", sizeInKB: 1 },
- { path: "autofill-profiles.json", sizeInKB: 1 },
- { path: "credentialstate.sqlite", sizeInKB: 100 },
- { path: "signedInUser.json", sizeInKB: 5 },
- // Set up security files
- { path: "cert9.db", sizeInKB: 230 },
- { path: "pkcs11.txt", sizeInKB: 1 },
- ];
-
- await createTestFiles(tempDir, mockFiles);
-
- let credentialsAndSecurityBackupResource =
- new CredentialsAndSecurityBackupResource();
- await credentialsAndSecurityBackupResource.measure(tempDir);
-
- let credentialsMeasurement =
- Glean.browserBackup.credentialsDataSize.testGetValue();
- let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue();
- let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
- // Credentials measurements
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.credentials_data_size",
- credentialsMeasurement,
- "Glean and telemetry measurements for credentials data should be equal"
- );
-
- Assert.equal(
- credentialsMeasurement,
- EXPECTED_CREDENTIALS_KILOBYTES_SIZE,
- "Should have collected the correct glean measurement for credentials files"
- );
-
- // Security measurements
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.security_data_size",
- securityMeasurement,
- "Glean and telemetry measurements for security data should be equal"
- );
- Assert.equal(
- securityMeasurement,
- EXPECTED_SECURITY_KILOBYTES_SIZE,
- "Should have collected the correct glean measurement for security files"
- );
-
- // Cleanup
- await maybeRemovePath(tempDir);
-});
-
-/**
- * Tests that we can measure the Cookies db in a profile directory.
- */
-add_task(async function test_cookiesBackupResource() {
- const EXPECTED_COOKIES_DB_SIZE = 1230;
-
- Services.fog.testResetFOG();
-
- // Create resource files in temporary directory
- let tempDir = PathUtils.tempDir;
- let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite");
- await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE);
-
- let cookiesBackupResource = new CookiesBackupResource();
- await cookiesBackupResource.measure(tempDir);
-
- let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue();
- let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
- // Compare glean vs telemetry measurements
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.cookies_size",
- cookiesMeasurement,
- "Glean and telemetry measurements for cookies.sqlite should be equal"
- );
-
- // Compare glean measurements vs actual file sizes
- Assert.equal(
- cookiesMeasurement,
- EXPECTED_COOKIES_DB_SIZE,
- "Should have collected the correct glean measurement for cookies.sqlite"
- );
-
- await maybeRemovePath(tempCookiesDBPath);
-});
-
-/**
- * Tests that we can measure the Form History db in a profile directory.
- */
-add_task(async function test_formHistoryBackupResource() {
- const EXPECTED_FORM_HISTORY_DB_SIZE = 500;
-
- Services.fog.testResetFOG();
-
- // Create resource files in temporary directory
- let tempDir = PathUtils.tempDir;
- let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite");
- await createKilobyteSizedFile(
- tempFormHistoryDBPath,
- EXPECTED_FORM_HISTORY_DB_SIZE
- );
-
- let formHistoryBackupResource = new FormHistoryBackupResource();
- await formHistoryBackupResource.measure(tempDir);
-
- let formHistoryMeasurement =
- Glean.browserBackup.formHistorySize.testGetValue();
- let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
- // Compare glean vs telemetry measurements
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.form_history_size",
- formHistoryMeasurement,
- "Glean and telemetry measurements for formhistory.sqlite should be equal"
- );
-
- // Compare glean measurements vs actual file sizes
- Assert.equal(
- formHistoryMeasurement,
- EXPECTED_FORM_HISTORY_DB_SIZE,
- "Should have collected the correct glean measurement for formhistory.sqlite"
- );
-
- await IOUtils.remove(tempFormHistoryDBPath);
-});
-
-/**
- * Tests that we can measure the Session Store JSON and backups directory.
- */
-add_task(async function test_sessionStoreBackupResource() {
- const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000;
- Services.fog.testResetFOG();
-
- // Create the sessionstore-backups directory.
- let tempDir = PathUtils.tempDir;
- let sessionStoreBackupsPath = PathUtils.join(
- tempDir,
- "sessionstore-backups",
- "restore.jsonlz4"
- );
- await createKilobyteSizedFile(
- sessionStoreBackupsPath,
- EXPECTED_KILOBYTES_FOR_BACKUPS_DIR
- );
-
- let sessionStoreBackupResource = new SessionStoreBackupResource();
- await sessionStoreBackupResource.measure(tempDir);
-
- let sessionStoreBackupsDirectoryMeasurement =
- Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue();
- let sessionStoreMeasurement =
- Glean.browserBackup.sessionStoreSize.testGetValue();
- let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
-
- // Compare glean vs telemetry measurements
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.session_store_backups_directory_size",
- sessionStoreBackupsDirectoryMeasurement,
- "Glean and telemetry measurements for session store backups directory should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.session_store_size",
- sessionStoreMeasurement,
- "Glean and telemetry measurements for session store should be equal"
- );
-
- // Compare glean measurements vs actual file sizes
- Assert.equal(
- sessionStoreBackupsDirectoryMeasurement,
- EXPECTED_KILOBYTES_FOR_BACKUPS_DIR,
- "Should have collected the correct glean measurement for the sessionstore-backups directory"
- );
-
- // Session store measurement is from `getCurrentState`, so exact size is unknown.
- Assert.greater(
- sessionStoreMeasurement,
- 0,
- "Should have collected a measurement for the session store"
- );
-
- await IOUtils.remove(sessionStoreBackupsPath);
-});
-
-/**
- * Tests that we can measure the size of all the addons & extensions data.
- */
-add_task(async function test_AddonsBackupResource() {
- Services.fog.testResetFOG();
- Services.telemetry.clearScalars();
-
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500;
- const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000;
- const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100;
- const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200;
-
- let tempDir = PathUtils.tempDir;
-
- // Create extensions json files (all the same size).
- const extensionsFilePath = PathUtils.join(tempDir, "extensions.json");
- await createKilobyteSizedFile(
- extensionsFilePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
- );
- const extensionSettingsFilePath = PathUtils.join(
- tempDir,
- "extension-settings.json"
- );
- await createKilobyteSizedFile(
- extensionSettingsFilePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
- );
- const extensionsPrefsFilePath = PathUtils.join(
- tempDir,
- "extension-preferences.json"
- );
- await createKilobyteSizedFile(
- extensionsPrefsFilePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
- );
- const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4");
- await createKilobyteSizedFile(
- addonStartupFilePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
- );
-
- // Create the extension store permissions data file.
- let extensionStorePermissionsDataSize = PathUtils.join(
- tempDir,
- "extension-store-permissions",
- "data.safe.bin"
- );
- await createKilobyteSizedFile(
- extensionStorePermissionsDataSize,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE
- );
-
- // Create the storage sync database file.
- let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite");
- await createKilobyteSizedFile(
- storageSyncPath,
- EXPECTED_KILOBYTES_FOR_STORAGE_SYNC
- );
-
- // Create the extensions directory with XPI files.
- let extensionsXpiAPath = PathUtils.join(
- tempDir,
- "extensions",
- "extension-b.xpi"
- );
- let extensionsXpiBPath = PathUtils.join(
- tempDir,
- "extensions",
- "extension-a.xpi"
- );
- await createKilobyteSizedFile(
- extensionsXpiAPath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A
- );
- await createKilobyteSizedFile(
- extensionsXpiBPath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B
- );
- // Should be ignored.
- let extensionsXpiStagedPath = PathUtils.join(
- tempDir,
- "extensions",
- "staged",
- "staged-test-extension.xpi"
- );
- let extensionsXpiTrashPath = PathUtils.join(
- tempDir,
- "extensions",
- "trash",
- "trashed-test-extension.xpi"
- );
- let extensionsXpiUnpackedPath = PathUtils.join(
- tempDir,
- "extensions",
- "unpacked-extension.xpi",
- "manifest.json"
- );
- await createKilobyteSizedFile(
- extensionsXpiStagedPath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
- );
- await createKilobyteSizedFile(
- extensionsXpiTrashPath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
- );
- await createKilobyteSizedFile(
- extensionsXpiUnpackedPath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C
- );
-
- // Create the browser extension data directory.
- let browserExtensionDataPath = PathUtils.join(
- tempDir,
- "browser-extension-data",
- "test-file"
- );
- await createKilobyteSizedFile(
- browserExtensionDataPath,
- EXPECTED_KILOBYTES_FOR_EXTENSION_DATA
- );
-
- // Create the extensions storage directory.
- let extensionsStoragePath = PathUtils.join(
- tempDir,
- "storage",
- "default",
- "moz-extension+++test-extension-id",
- "idb",
- "data.sqlite"
- );
- // Other storage files that should not be counted.
- let otherStoragePath = PathUtils.join(
- tempDir,
- "storage",
- "default",
- "https+++accounts.firefox.com",
- "ls",
- "data.sqlite"
- );
-
- await createKilobyteSizedFile(
- extensionsStoragePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
- );
- await createKilobyteSizedFile(
- otherStoragePath,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE
- );
-
- // Measure all the extensions data.
- let extensionsBackupResource = new AddonsBackupResource();
- await extensionsBackupResource.measure(tempDir);
-
- let extensionsJsonSizeMeasurement =
- Glean.browserBackup.extensionsJsonSize.testGetValue();
- Assert.equal(
- extensionsJsonSizeMeasurement,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files.
- "Should have collected the correct measurement of the total size of all extensions JSON files"
- );
-
- let extensionStorePermissionsDataSizeMeasurement =
- Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
- Assert.equal(
- extensionStorePermissionsDataSizeMeasurement,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE,
- "Should have collected the correct measurement of the size of the extension store permissions data"
- );
-
- let storageSyncSizeMeasurement =
- Glean.browserBackup.storageSyncSize.testGetValue();
- Assert.equal(
- storageSyncSizeMeasurement,
- EXPECTED_KILOBYTES_FOR_STORAGE_SYNC,
- "Should have collected the correct measurement of the size of the storage sync database"
- );
-
- let extensionsXpiDirectorySizeMeasurement =
- Glean.browserBackup.extensionsXpiDirectorySize.testGetValue();
- Assert.equal(
- extensionsXpiDirectorySizeMeasurement,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY,
- "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory"
- );
-
- let browserExtensionDataSizeMeasurement =
- Glean.browserBackup.browserExtensionDataSize.testGetValue();
- Assert.equal(
- browserExtensionDataSizeMeasurement,
- EXPECTED_KILOBYTES_FOR_EXTENSION_DATA,
- "Should have collected the correct measurement of the size of the browser extension data directory"
- );
-
- let extensionsStorageSizeMeasurement =
- Glean.browserBackup.extensionsStorageSize.testGetValue();
- Assert.equal(
- extensionsStorageSizeMeasurement,
- EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE,
- "Should have collected the correct measurement of all the extensions storage"
- );
-
- // Compare glean vs telemetry measurements
- let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false);
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.extensions_json_size",
- extensionsJsonSizeMeasurement,
- "Glean and telemetry measurements for extensions JSON should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.extension_store_permissions_data_size",
- extensionStorePermissionsDataSizeMeasurement,
- "Glean and telemetry measurements for extension store permissions data should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.storage_sync_size",
- storageSyncSizeMeasurement,
- "Glean and telemetry measurements for storage sync database should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.extensions_xpi_directory_size",
- extensionsXpiDirectorySizeMeasurement,
- "Glean and telemetry measurements for extensions directory should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.browser_extension_data_size",
- browserExtensionDataSizeMeasurement,
- "Glean and telemetry measurements for browser extension data should be equal"
- );
- TelemetryTestUtils.assertScalar(
- scalars,
- "browser.backup.extensions_storage_size",
- extensionsStorageSizeMeasurement,
- "Glean and telemetry measurements for extensions storage should be equal"
- );
-
- await maybeRemovePath(tempDir);
-});
-
-/**
- * Tests that we can handle the extension store permissions data not existing.
- */
-add_task(
- async function test_AddonsBackupResource_no_extension_store_permissions_data() {
- Services.fog.testResetFOG();
-
- let tempDir = PathUtils.tempDir;
-
- let extensionsBackupResource = new AddonsBackupResource();
- await extensionsBackupResource.measure(tempDir);
-
- let extensionStorePermissionsDataSizeMeasurement =
- Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
- Assert.equal(
- extensionStorePermissionsDataSizeMeasurement,
- null,
- "Should NOT have collected a measurement for the missing data"
- );
- }
-);
-
-/**
- * Tests that we can handle a profile with no moz-extension IndexedDB databases.
- */
-add_task(
- async function test_AddonsBackupResource_no_extension_storage_databases() {
- Services.fog.testResetFOG();
-
- let tempDir = PathUtils.tempDir;
-
- let extensionsBackupResource = new AddonsBackupResource();
- await extensionsBackupResource.measure(tempDir);
-
- let extensionsStorageSizeMeasurement =
- Glean.browserBackup.extensionsStorageSize.testGetValue();
- Assert.equal(
- extensionsStorageSizeMeasurement,
- null,
- "Should NOT have collected a measurement for the missing data"
- );
- }
-);
diff --git a/browser/components/backup/tests/xpcshell/xpcshell.toml b/browser/components/backup/tests/xpcshell/xpcshell.toml
index 07e517f1f2..8a41c9e761 100644
--- a/browser/components/backup/tests/xpcshell/xpcshell.toml
+++ b/browser/components/backup/tests/xpcshell/xpcshell.toml
@@ -6,15 +6,25 @@ prefs = [
"browser.backup.log=true",
]
+["test_AddonsBackupResource.js"]
+
["test_BackupResource.js"]
support-files = ["data/test_xulstore.json"]
+["test_BackupService.js"]
+
+["test_BackupService_takeMeasurements.js"]
+
+["test_CookiesBackupResource.js"]
+
+["test_CredentialsAndSecurityBackupResource.js"]
+
+["test_FormHistoryBackupResource.js"]
+
["test_MiscDataBackupResource.js"]
["test_PlacesBackupResource.js"]
["test_PreferencesBackupResource.js"]
-["test_createBackup.js"]
-
-["test_measurements.js"]
+["test_SessionStoreBackupResource.js"]