author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-06-12 05:35:29 +0000
commit    | 59203c63bb777a3bacec32fb8830fba33540e809 (patch)
tree      | 58298e711c0ff0575818c30485b44a2f21bf28a0 /browser/components/backup/tests/xpcshell
parent    | Adding upstream version 126.0.1. (diff)
Adding upstream version 127.0. (upstream/127.0)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'browser/components/backup/tests/xpcshell')
16 files changed, 2239 insertions(+), 668 deletions(-)
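A pattern that recurs throughout the new and updated tests in this diff is stubbing Sqlite.openConnection with a sinon sandbox so that the BackupResource backup paths never touch a real database. The following is a minimal sketch of that pattern, not part of the commit itself; it assumes the sinon, Sqlite, Assert, and PathUtils globals that these xpcshell tests already have in scope via head.js and the test harness.

// Minimal sketch (illustrative only) of the Sqlite stubbing pattern used below.
let sandbox = sinon.createSandbox();

// Fake connection whose backup()/close() resolve immediately, so no real
// SQLite file is ever opened or copied during the test.
let fakeConnection = {
  backup: sandbox.stub().resolves(true),
  close: sandbox.stub().resolves(true),
};
sandbox.stub(Sqlite, "openConnection").returns(fakeConnection);

// ... exercise the BackupResource under test here ...

// The tests then assert on how the fake connection was used, for example:
Assert.ok(fakeConnection.backup.calledOnce, "backup was called once");
sandbox.restore();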
diff --git a/browser/components/backup/tests/xpcshell/data/test_xulstore.json b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
index 0d0890ab16..e4ae6f1f66 100644
--- a/browser/components/backup/tests/xpcshell/data/test_xulstore.json
+++ b/browser/components/backup/tests/xpcshell/data/test_xulstore.json
@@ -9,7 +9,6 @@
       "sizemode": "normal"
     },
     "sidebar-box": {
-      "sidebarcommand": "viewBookmarksSidebar",
       "width": "323",
       "style": "width: 323px;"
     },
diff --git a/browser/components/backup/tests/xpcshell/head.js b/browser/components/backup/tests/xpcshell/head.js
index 2402870a13..e5ed32fb63 100644
--- a/browser/components/backup/tests/xpcshell/head.js
+++ b/browser/components/backup/tests/xpcshell/head.js
@@ -50,6 +50,9 @@ class FakeBackupResource2 extends BackupResource {
   static get requiresEncryption() {
     return true;
   }
+  static get priority() {
+    return 1;
+  }
 }
 
 /**
@@ -62,6 +65,9 @@ class FakeBackupResource3 extends BackupResource {
   static get requiresEncryption() {
     return false;
   }
+  static get priority() {
+    return 2;
+  }
 }
 
 /**
diff --git a/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
new file mode 100644
index 0000000000..d1c47ecdb0
--- /dev/null
+++ b/browser/components/backup/tests/xpcshell/test_AddonsBackupResource.js
@@ -0,0 +1,416 @@
+/* Any copyright is dedicated to the Public Domain.
+https://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { AddonsBackupResource } = ChromeUtils.importESModule(
+  "resource:///modules/backup/AddonsBackupResource.sys.mjs"
+);
+
+/**
+ * Tests that we can measure the size of all the addons & extensions data.
+ */
+add_task(async function test_measure() {
+  Services.fog.testResetFOG();
+  Services.telemetry.clearScalars();
+
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500;
+  const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000;
+  const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100;
+  const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200;
+
+  let tempDir = PathUtils.tempDir;
+
+  // Create extensions json files (all the same size).
+  const extensionsFilePath = PathUtils.join(tempDir, "extensions.json");
+  await createKilobyteSizedFile(
+    extensionsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const extensionSettingsFilePath = PathUtils.join(
+    tempDir,
+    "extension-settings.json"
+  );
+  await createKilobyteSizedFile(
+    extensionSettingsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const extensionsPrefsFilePath = PathUtils.join(
+    tempDir,
+    "extension-preferences.json"
+  );
+  await createKilobyteSizedFile(
+    extensionsPrefsFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+  const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4");
+  await createKilobyteSizedFile(
+    addonStartupFilePath,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON
+  );
+
+  // Create the extension store permissions data file.
+  let extensionStorePermissionsDataSize = PathUtils.join(
+    tempDir,
+    "extension-store-permissions",
+    "data.safe.bin"
+  );
+  await createKilobyteSizedFile(
+    extensionStorePermissionsDataSize,
+    EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE
+  );
+
+  // Create the storage sync database file.
+ let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite"); + await createKilobyteSizedFile( + storageSyncPath, + EXPECTED_KILOBYTES_FOR_STORAGE_SYNC + ); + + // Create the extensions directory with XPI files. + let extensionsXPIAPath = PathUtils.join( + tempDir, + "extensions", + "extension-b.xpi" + ); + let extensionsXPIBPath = PathUtils.join( + tempDir, + "extensions", + "extension-a.xpi" + ); + await createKilobyteSizedFile( + extensionsXPIAPath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A + ); + await createKilobyteSizedFile( + extensionsXPIBPath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B + ); + // Should be ignored. + let extensionsXPIStagedPath = PathUtils.join( + tempDir, + "extensions", + "staged", + "staged-test-extension.xpi" + ); + let extensionsXPITrashPath = PathUtils.join( + tempDir, + "extensions", + "trash", + "trashed-test-extension.xpi" + ); + let extensionsXPIUnpackedPath = PathUtils.join( + tempDir, + "extensions", + "unpacked-extension.xpi", + "manifest.json" + ); + await createKilobyteSizedFile( + extensionsXPIStagedPath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C + ); + await createKilobyteSizedFile( + extensionsXPITrashPath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C + ); + await createKilobyteSizedFile( + extensionsXPIUnpackedPath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C + ); + + // Create the browser extension data directory. + let browserExtensionDataPath = PathUtils.join( + tempDir, + "browser-extension-data", + "test-file" + ); + await createKilobyteSizedFile( + browserExtensionDataPath, + EXPECTED_KILOBYTES_FOR_EXTENSION_DATA + ); + + // Create the extensions storage directory. + let extensionsStoragePath = PathUtils.join( + tempDir, + "storage", + "default", + "moz-extension+++test-extension-id", + "idb", + "data.sqlite" + ); + // Other storage files that should not be counted. + let otherStoragePath = PathUtils.join( + tempDir, + "storage", + "default", + "https+++accounts.firefox.com", + "ls", + "data.sqlite" + ); + + await createKilobyteSizedFile( + extensionsStoragePath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE + ); + await createKilobyteSizedFile( + otherStoragePath, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE + ); + + // Measure all the extensions data. + let extensionsBackupResource = new AddonsBackupResource(); + await extensionsBackupResource.measure(tempDir); + + let extensionsJsonSizeMeasurement = + Glean.browserBackup.extensionsJsonSize.testGetValue(); + Assert.equal( + extensionsJsonSizeMeasurement, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files. 
+ "Should have collected the correct measurement of the total size of all extensions JSON files" + ); + + let extensionStorePermissionsDataSizeMeasurement = + Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue(); + Assert.equal( + extensionStorePermissionsDataSizeMeasurement, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE, + "Should have collected the correct measurement of the size of the extension store permissions data" + ); + + let storageSyncSizeMeasurement = + Glean.browserBackup.storageSyncSize.testGetValue(); + Assert.equal( + storageSyncSizeMeasurement, + EXPECTED_KILOBYTES_FOR_STORAGE_SYNC, + "Should have collected the correct measurement of the size of the storage sync database" + ); + + let extensionsXPIDirectorySizeMeasurement = + Glean.browserBackup.extensionsXpiDirectorySize.testGetValue(); + Assert.equal( + extensionsXPIDirectorySizeMeasurement, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY, + "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory" + ); + + let browserExtensionDataSizeMeasurement = + Glean.browserBackup.browserExtensionDataSize.testGetValue(); + Assert.equal( + browserExtensionDataSizeMeasurement, + EXPECTED_KILOBYTES_FOR_EXTENSION_DATA, + "Should have collected the correct measurement of the size of the browser extension data directory" + ); + + let extensionsStorageSizeMeasurement = + Glean.browserBackup.extensionsStorageSize.testGetValue(); + Assert.equal( + extensionsStorageSizeMeasurement, + EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE, + "Should have collected the correct measurement of all the extensions storage" + ); + + // Compare glean vs telemetry measurements + let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.extensions_json_size", + extensionsJsonSizeMeasurement, + "Glean and telemetry measurements for extensions JSON should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.extension_store_permissions_data_size", + extensionStorePermissionsDataSizeMeasurement, + "Glean and telemetry measurements for extension store permissions data should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.storage_sync_size", + storageSyncSizeMeasurement, + "Glean and telemetry measurements for storage sync database should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.extensions_xpi_directory_size", + extensionsXPIDirectorySizeMeasurement, + "Glean and telemetry measurements for extensions directory should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.browser_extension_data_size", + browserExtensionDataSizeMeasurement, + "Glean and telemetry measurements for browser extension data should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.extensions_storage_size", + extensionsStorageSizeMeasurement, + "Glean and telemetry measurements for extensions storage should be equal" + ); + + await maybeRemovePath(tempDir); +}); + +/** + * Tests that we can handle the extension store permissions data + * and moz-extension IndexedDB databases not existing. 
+ */ +add_task(async function test_measure_missing_data() { + Services.fog.testResetFOG(); + + let tempDir = PathUtils.tempDir; + + let extensionsBackupResource = new AddonsBackupResource(); + await extensionsBackupResource.measure(tempDir); + + let extensionStorePermissionsDataSizeMeasurement = + Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue(); + Assert.equal( + extensionStorePermissionsDataSizeMeasurement, + null, + "Should NOT have collected a measurement for the missing permissions data" + ); + + let extensionsStorageSizeMeasurement = + Glean.browserBackup.extensionsStorageSize.testGetValue(); + Assert.equal( + extensionsStorageSizeMeasurement, + null, + "Should NOT have collected a measurement for the missing storage data" + ); +}); + +/** + * Test that the backup method correctly copies items from the profile directory + * into the staging directory. + */ +add_task(async function test_backup() { + let sandbox = sinon.createSandbox(); + + let addonsBackupResource = new AddonsBackupResource(); + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "AddonsBackupResource-source-test" + ); + let stagingPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "AddonsBackupResource-staging-test" + ); + + const simpleCopyFiles = [ + { path: "extensions.json" }, + { path: "extension-settings.json" }, + { path: "extension-preferences.json" }, + { path: "addonStartup.json.lz4" }, + { + path: [ + "browser-extension-data", + "{11aa1234-f111-1234-abcd-a9b8c7654d32}", + ], + }, + { path: ["extension-store-permissions", "data.safe.bin"] }, + { path: ["extensions", "{11aa1234-f111-1234-abcd-a9b8c7654d32}.xpi"] }, + ]; + await createTestFiles(sourcePath, simpleCopyFiles); + + const junkFiles = [{ path: ["extensions", "junk"] }]; + await createTestFiles(sourcePath, junkFiles); + + // Create a fake storage-sync-v2 database file. We don't expect this to + // be copied to the staging directory in this test due to our stubbing + // of the backup method, so we don't include it in `simpleCopyFiles`. + await createTestFiles(sourcePath, [{ path: "storage-sync-v2.sqlite" }]); + + let fakeConnection = { + backup: sandbox.stub().resolves(true), + close: sandbox.stub().resolves(true), + }; + sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); + + let manifestEntry = await addonsBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "AddonsBackupResource.backup should return null as its ManifestEntry" + ); + + await assertFilesExist(stagingPath, simpleCopyFiles); + + let junkFile = PathUtils.join(stagingPath, "extensions", "junk"); + Assert.equal( + await IOUtils.exists(junkFile), + false, + `${junkFile} should not exist in the staging folder` + ); + + // Make sure storage-sync-v2 database is backed up. + Assert.ok( + fakeConnection.backup.calledOnce, + "Called backup the expected number of times for all connections" + ); + Assert.ok( + fakeConnection.backup.calledWith( + PathUtils.join(stagingPath, "storage-sync-v2.sqlite") + ), + "Called backup on the storage-sync-v2 Sqlite connection" + ); + + await maybeRemovePath(stagingPath); + await maybeRemovePath(sourcePath); + + sandbox.restore(); +}); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. 
+ */ +add_task(async function test_recover() { + let addonsBackupResource = new AddonsBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "addonsBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "addonsBackupResource-test-profile" + ); + + const files = [ + { path: "extensions.json" }, + { path: "extension-settings.json" }, + { path: "extension-preferences.json" }, + { path: "addonStartup.json.lz4" }, + { path: "storage-sync-v2.sqlite" }, + { path: ["browser-extension-data", "addon@darkreader.org.xpi", "data"] }, + { path: ["extensions", "addon@darkreader.org.xpi"] }, + { path: ["extension-store-permissions", "data.safe.bin"] }, + ]; + await createTestFiles(recoveryPath, files); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await addonsBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "AddonsBackupResource.recover should return null as its post " + + "recovery entry" + ); + + await assertFilesExist(destProfilePath, files); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_BackupResource.js b/browser/components/backup/tests/xpcshell/test_BackupResource.js index 6623f4cd77..42cda918f9 100644 --- a/browser/components/backup/tests/xpcshell/test_BackupResource.js +++ b/browser/components/backup/tests/xpcshell/test_BackupResource.js @@ -31,7 +31,8 @@ add_task(async function test_getFileSize() { }); /** - * Tests that BackupService.getDirectorySize will get the total size of all the files in a directory and it's children in kilobytes. + * Tests that BackupService.getDirectorySize will get the total size of all the + * files in a directory and it's children in kilobytes. */ add_task(async function test_getDirectorySize() { let file = do_get_file("data/test_xulstore.json"); @@ -75,3 +76,175 @@ add_task(async function test_bytesToFuzzyKilobytes() { Assert.equal(smallSize, 1, "Sizes under 10 kilobytes return 1 kilobyte"); }); + +/** + * Tests that BackupResource.copySqliteDatabases will call `backup` on a new + * read-only connection on each database file. 
+ */ +add_task(async function test_copySqliteDatabases() { + let sandbox = sinon.createSandbox(); + const SQLITE_PAGES_PER_STEP_PREF = "browser.backup.sqlite.pages_per_step"; + const SQLITE_STEP_DELAY_MS_PREF = "browser.backup.sqlite.step_delay_ms"; + const DEFAULT_SQLITE_PAGES_PER_STEP = Services.prefs.getIntPref( + SQLITE_PAGES_PER_STEP_PREF + ); + const DEFAULT_SQLITE_STEP_DELAY_MS = Services.prefs.getIntPref( + SQLITE_STEP_DELAY_MS_PREF + ); + + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "BackupResource-source-test" + ); + let destPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "BackupResource-dest-test" + ); + let pretendDatabases = ["places.sqlite", "favicons.sqlite"]; + await createTestFiles( + sourcePath, + pretendDatabases.map(f => ({ path: f })) + ); + + let fakeConnection = { + backup: sandbox.stub().resolves(true), + close: sandbox.stub().resolves(true), + }; + sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); + + await BackupResource.copySqliteDatabases( + sourcePath, + destPath, + pretendDatabases + ); + + Assert.ok( + Sqlite.openConnection.calledTwice, + "Sqlite.openConnection called twice" + ); + Assert.ok( + Sqlite.openConnection.firstCall.calledWith({ + path: PathUtils.join(sourcePath, "places.sqlite"), + readOnly: true, + }), + "openConnection called with places.sqlite as read-only" + ); + Assert.ok( + Sqlite.openConnection.secondCall.calledWith({ + path: PathUtils.join(sourcePath, "favicons.sqlite"), + readOnly: true, + }), + "openConnection called with favicons.sqlite as read-only" + ); + + Assert.ok( + fakeConnection.backup.calledTwice, + "backup on an Sqlite connection called twice" + ); + Assert.ok( + fakeConnection.backup.firstCall.calledWith( + PathUtils.join(destPath, "places.sqlite"), + DEFAULT_SQLITE_PAGES_PER_STEP, + DEFAULT_SQLITE_STEP_DELAY_MS + ), + "backup called with places.sqlite to the destination path with the right " + + "pages per step and step delay" + ); + Assert.ok( + fakeConnection.backup.secondCall.calledWith( + PathUtils.join(destPath, "favicons.sqlite"), + DEFAULT_SQLITE_PAGES_PER_STEP, + DEFAULT_SQLITE_STEP_DELAY_MS + ), + "backup called with favicons.sqlite to the destination path with the " + + "right pages per step and step delay" + ); + + Assert.ok( + fakeConnection.close.calledTwice, + "close on an Sqlite connection called twice" + ); + + // Now check that we can override the default pages per step and step delay. 
+ fakeConnection.backup.resetHistory(); + const NEW_SQLITE_PAGES_PER_STEP = 10; + const NEW_SQLITE_STEP_DELAY_MS = 500; + Services.prefs.setIntPref( + SQLITE_PAGES_PER_STEP_PREF, + NEW_SQLITE_PAGES_PER_STEP + ); + Services.prefs.setIntPref( + SQLITE_STEP_DELAY_MS_PREF, + NEW_SQLITE_STEP_DELAY_MS + ); + await BackupResource.copySqliteDatabases( + sourcePath, + destPath, + pretendDatabases + ); + Assert.ok( + fakeConnection.backup.calledTwice, + "backup on an Sqlite connection called twice" + ); + Assert.ok( + fakeConnection.backup.firstCall.calledWith( + PathUtils.join(destPath, "places.sqlite"), + NEW_SQLITE_PAGES_PER_STEP, + NEW_SQLITE_STEP_DELAY_MS + ), + "backup called with places.sqlite to the destination path with the right " + + "pages per step and step delay" + ); + Assert.ok( + fakeConnection.backup.secondCall.calledWith( + PathUtils.join(destPath, "favicons.sqlite"), + NEW_SQLITE_PAGES_PER_STEP, + NEW_SQLITE_STEP_DELAY_MS + ), + "backup called with favicons.sqlite to the destination path with the " + + "right pages per step and step delay" + ); + + await maybeRemovePath(sourcePath); + await maybeRemovePath(destPath); + sandbox.restore(); +}); + +/** + * Tests that BackupResource.copyFiles will copy files from one directory to + * another. + */ +add_task(async function test_copyFiles() { + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "BackupResource-source-test" + ); + let destPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "BackupResource-dest-test" + ); + + const testFiles = [ + { path: "file1.txt" }, + { path: ["some", "nested", "file", "file2.txt"] }, + { path: "file3.txt" }, + ]; + + await createTestFiles(sourcePath, testFiles); + + await BackupResource.copyFiles(sourcePath, destPath, [ + "file1.txt", + "some", + "file3.txt", + "does-not-exist.txt", + ]); + + await assertFilesExist(destPath, testFiles); + Assert.ok( + !(await IOUtils.exists(PathUtils.join(destPath, "does-not-exist.txt"))), + "does-not-exist.txt wasn't somehow written to." + ); + + await maybeRemovePath(sourcePath); + await maybeRemovePath(destPath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_BackupService.js b/browser/components/backup/tests/xpcshell/test_BackupService.js new file mode 100644 index 0000000000..33fb9fbb99 --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_BackupService.js @@ -0,0 +1,451 @@ +/* Any copyright is dedicated to the Public Domain. +https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { AppConstants } = ChromeUtils.importESModule( + "resource://gre/modules/AppConstants.sys.mjs" +); +const { JsonSchemaValidator } = ChromeUtils.importESModule( + "resource://gre/modules/components-utils/JsonSchemaValidator.sys.mjs" +); +const { UIState } = ChromeUtils.importESModule( + "resource://services-sync/UIState.sys.mjs" +); +const { ClientID } = ChromeUtils.importESModule( + "resource://gre/modules/ClientID.sys.mjs" +); + +add_setup(function () { + // Much of this setup is copied from toolkit/profile/xpcshell/head.js. It is + // needed in order to put the xpcshell test environment into the state where + // it thinks its profile is the one pointed at by + // nsIToolkitProfileService.currentProfile. 
+ let gProfD = do_get_profile(); + let gDataHome = gProfD.clone(); + gDataHome.append("data"); + gDataHome.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755); + let gDataHomeLocal = gProfD.clone(); + gDataHomeLocal.append("local"); + gDataHomeLocal.createUnique(Ci.nsIFile.DIRECTORY_TYPE, 0o755); + + let xreDirProvider = Cc["@mozilla.org/xre/directory-provider;1"].getService( + Ci.nsIXREDirProvider + ); + xreDirProvider.setUserDataDirectory(gDataHome, false); + xreDirProvider.setUserDataDirectory(gDataHomeLocal, true); + + let profileSvc = Cc["@mozilla.org/toolkit/profile-service;1"].getService( + Ci.nsIToolkitProfileService + ); + + let createdProfile = {}; + let didCreate = profileSvc.selectStartupProfile( + ["xpcshell"], + false, + AppConstants.UPDATE_CHANNEL, + "", + {}, + {}, + createdProfile + ); + Assert.ok(didCreate, "Created a testing profile and set it to current."); + Assert.equal( + profileSvc.currentProfile, + createdProfile.value, + "Profile set to current" + ); +}); + +/** + * A utility function for testing BackupService.createBackup. This helper + * function: + * + * 1. Ensures that `backup` will be called on BackupResources with the service + * 2. Ensures that a backup-manifest.json will be written and contain the + * ManifestEntry data returned by each BackupResource. + * 3. Ensures that a `staging` folder will be written to and renamed properly + * once the backup creation is complete. + * + * Once this is done, a task function can be run. The task function is passed + * the parsed backup-manifest.json object as its only argument. + * + * @param {object} sandbox + * The Sinon sandbox to be used stubs and mocks. The test using this helper + * is responsible for creating and resetting this sandbox. + * @param {Function} taskFn + * A function that is run once all default checks are done on the manifest + * and staging folder. After this function returns, the staging folder will + * be cleaned up. + * @returns {Promise<undefined>} + */ +async function testCreateBackupHelper(sandbox, taskFn) { + const EXPECTED_CLIENT_ID = await ClientID.getClientID(); + + let fake1ManifestEntry = { fake1: "hello from 1" }; + sandbox + .stub(FakeBackupResource1.prototype, "backup") + .resolves(fake1ManifestEntry); + + sandbox + .stub(FakeBackupResource2.prototype, "backup") + .rejects(new Error("Some failure to backup")); + + let fake3ManifestEntry = { fake3: "hello from 3" }; + sandbox + .stub(FakeBackupResource3.prototype, "backup") + .resolves(fake3ManifestEntry); + + let bs = new BackupService({ + FakeBackupResource1, + FakeBackupResource2, + FakeBackupResource3, + }); + + let fakeProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "createBackupTest" + ); + + await bs.createBackup({ profilePath: fakeProfilePath }); + + // We expect the staging folder to exist then be renamed under the fakeProfilePath. + // We should also find a folder for each fake BackupResource. + let backupsFolderPath = PathUtils.join(fakeProfilePath, "backups"); + let stagingPath = PathUtils.join(backupsFolderPath, "staging"); + + // For now, we expect a single backup only to be saved. 
+ let backups = await IOUtils.getChildren(backupsFolderPath); + Assert.equal( + backups.length, + 1, + "There should only be 1 backup in the backups folder" + ); + + let renamedFilename = await PathUtils.filename(backups[0]); + let expectedFormatRegex = /^\d{4}(-\d{2}){2}T(\d{2}-){2}\d{2}Z$/; + Assert.ok( + renamedFilename.match(expectedFormatRegex), + "Renamed staging folder should have format YYYY-MM-DDTHH-mm-ssZ" + ); + + let stagingPathRenamed = PathUtils.join(backupsFolderPath, renamedFilename); + + for (let backupResourceClass of [ + FakeBackupResource1, + FakeBackupResource2, + FakeBackupResource3, + ]) { + let expectedResourceFolderBeforeRename = PathUtils.join( + stagingPath, + backupResourceClass.key + ); + let expectedResourceFolderAfterRename = PathUtils.join( + stagingPathRenamed, + backupResourceClass.key + ); + + Assert.ok( + await IOUtils.exists(expectedResourceFolderAfterRename), + `BackupResource folder exists for ${backupResourceClass.key} after rename` + ); + Assert.ok( + backupResourceClass.prototype.backup.calledOnce, + `Backup was called for ${backupResourceClass.key}` + ); + Assert.ok( + backupResourceClass.prototype.backup.calledWith( + expectedResourceFolderBeforeRename, + fakeProfilePath + ), + `Backup was called in the staging folder for ${backupResourceClass.key} before rename` + ); + } + + // Check that resources were called from highest to lowest backup priority. + sinon.assert.callOrder( + FakeBackupResource3.prototype.backup, + FakeBackupResource2.prototype.backup, + FakeBackupResource1.prototype.backup + ); + + let manifestPath = PathUtils.join( + stagingPathRenamed, + BackupService.MANIFEST_FILE_NAME + ); + + Assert.ok(await IOUtils.exists(manifestPath), "Manifest file exists"); + let manifest = await IOUtils.readJSON(manifestPath); + + let schema = await BackupService.MANIFEST_SCHEMA; + let validationResult = JsonSchemaValidator.validate(manifest, schema); + Assert.ok(validationResult.valid, "Schema matches manifest"); + Assert.deepEqual( + Object.keys(manifest.resources).sort(), + ["fake1", "fake3"], + "Manifest contains all expected BackupResource keys" + ); + Assert.deepEqual( + manifest.resources.fake1, + fake1ManifestEntry, + "Manifest contains the expected entry for FakeBackupResource1" + ); + Assert.deepEqual( + manifest.resources.fake3, + fake3ManifestEntry, + "Manifest contains the expected entry for FakeBackupResource3" + ); + Assert.equal( + manifest.meta.legacyClientID, + EXPECTED_CLIENT_ID, + "The client ID was stored properly." + ); + + taskFn(manifest); + + // After createBackup is more fleshed out, we're going to want to make sure + // that we're writing the manifest file and that it contains the expected + // ManifestEntry objects, and that the staging folder was successfully + // renamed with the current date. + await IOUtils.remove(fakeProfilePath, { recursive: true }); +} + +/** + * Tests that calling BackupService.createBackup will call backup on each + * registered BackupResource, and that each BackupResource will have a folder + * created for them to write into. Tests in the signed-out state. + */ +add_task(async function test_createBackup_signed_out() { + let sandbox = sinon.createSandbox(); + + sandbox + .stub(UIState, "get") + .returns({ status: UIState.STATUS_NOT_CONFIGURED }); + await testCreateBackupHelper(sandbox, manifest => { + Assert.equal( + manifest.meta.accountID, + undefined, + "Account ID should be undefined." + ); + Assert.equal( + manifest.meta.accountEmail, + undefined, + "Account email should be undefined." 
+ ); + }); + + sandbox.restore(); +}); + +/** + * Tests that calling BackupService.createBackup will call backup on each + * registered BackupResource, and that each BackupResource will have a folder + * created for them to write into. Tests in the signed-in state. + */ +add_task(async function test_createBackup_signed_in() { + let sandbox = sinon.createSandbox(); + + const TEST_UID = "ThisIsMyTestUID"; + const TEST_EMAIL = "foxy@mozilla.org"; + + sandbox.stub(UIState, "get").returns({ + status: UIState.STATUS_SIGNED_IN, + uid: TEST_UID, + email: TEST_EMAIL, + }); + + await testCreateBackupHelper(sandbox, manifest => { + Assert.equal( + manifest.meta.accountID, + TEST_UID, + "Account ID should be set properly." + ); + Assert.equal( + manifest.meta.accountEmail, + TEST_EMAIL, + "Account email should be set properly." + ); + }); + + sandbox.restore(); +}); + +/** + * Creates a directory that looks a lot like a decompressed backup archive, + * and then tests that BackupService.recoverFromBackup can create a new profile + * and recover into it. + */ +add_task(async function test_recoverFromBackup() { + let sandbox = sinon.createSandbox(); + let fakeEntryMap = new Map(); + let backupResourceClasses = [ + FakeBackupResource1, + FakeBackupResource2, + FakeBackupResource3, + ]; + + let i = 1; + for (let backupResourceClass of backupResourceClasses) { + let fakeManifestEntry = { [`fake${i}`]: `hello from backup - ${i}` }; + sandbox + .stub(backupResourceClass.prototype, "backup") + .resolves(fakeManifestEntry); + + let fakePostRecoveryEntry = { [`fake${i}`]: `hello from recover - ${i}` }; + sandbox + .stub(backupResourceClass.prototype, "recover") + .resolves(fakePostRecoveryEntry); + + fakeEntryMap.set(backupResourceClass, { + manifestEntry: fakeManifestEntry, + postRecoveryEntry: fakePostRecoveryEntry, + }); + + ++i; + } + + let bs = new BackupService({ + FakeBackupResource1, + FakeBackupResource2, + FakeBackupResource3, + }); + + let oldProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "recoverFromBackupTest" + ); + let newProfileRootPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "recoverFromBackupTest-newProfileRoot" + ); + + let { stagingPath } = await bs.createBackup({ profilePath: oldProfilePath }); + + let testTelemetryStateObject = { + clientID: "ed209123-04a1-04a1-04a1-c0ffeec0ffee", + }; + await IOUtils.writeJSON( + PathUtils.join(PathUtils.profileDir, "datareporting", "state.json"), + testTelemetryStateObject + ); + + let profile = await bs.recoverFromBackup( + stagingPath, + false /* shouldLaunch */, + newProfileRootPath + ); + Assert.ok(profile, "An nsIToolkitProfile was created."); + let newProfilePath = profile.rootDir.path; + + let postRecoveryFilePath = PathUtils.join( + newProfilePath, + "post-recovery.json" + ); + let postRecovery = await IOUtils.readJSON(postRecoveryFilePath); + + for (let backupResourceClass of backupResourceClasses) { + let expectedResourceFolder = PathUtils.join( + stagingPath, + backupResourceClass.key + ); + + let { manifestEntry, postRecoveryEntry } = + fakeEntryMap.get(backupResourceClass); + + Assert.ok( + backupResourceClass.prototype.recover.calledOnce, + `Recover was called for ${backupResourceClass.key}` + ); + Assert.ok( + backupResourceClass.prototype.recover.calledWith( + manifestEntry, + expectedResourceFolder, + newProfilePath + ), + `Recover was passed the right arguments for ${backupResourceClass.key}` + ); + Assert.deepEqual( + postRecoveryEntry, + postRecovery[backupResourceClass.key], + "The post 
recovery data is as expected" + ); + } + + let newProfileTelemetryStateObject = await IOUtils.readJSON( + PathUtils.join(newProfileRootPath, "datareporting", "state.json") + ); + Assert.deepEqual( + testTelemetryStateObject, + newProfileTelemetryStateObject, + "Recovered profile inherited telemetry state from the profile that " + + "initiated recovery" + ); + + await IOUtils.remove(oldProfilePath, { recursive: true }); + await IOUtils.remove(newProfileRootPath, { recursive: true }); + sandbox.restore(); +}); + +/** + * Tests that if there's a post-recovery.json file in the profile directory + * when checkForPostRecovery() is called, that it is processed, and the + * postRecovery methods on the associated BackupResources are called with the + * entry values from the file. + */ +add_task(async function test_checkForPostRecovery() { + let sandbox = sinon.createSandbox(); + + let testProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "checkForPostRecoveryTest" + ); + let fakePostRecoveryObject = { + [FakeBackupResource1.key]: "test 1", + [FakeBackupResource3.key]: "test 3", + }; + await IOUtils.writeJSON( + PathUtils.join(testProfilePath, BackupService.POST_RECOVERY_FILE_NAME), + fakePostRecoveryObject + ); + + sandbox.stub(FakeBackupResource1.prototype, "postRecovery").resolves(); + sandbox.stub(FakeBackupResource2.prototype, "postRecovery").resolves(); + sandbox.stub(FakeBackupResource3.prototype, "postRecovery").resolves(); + + let bs = new BackupService({ + FakeBackupResource1, + FakeBackupResource2, + FakeBackupResource3, + }); + + await bs.checkForPostRecovery(testProfilePath); + await bs.postRecoveryComplete; + + Assert.ok( + FakeBackupResource1.prototype.postRecovery.calledOnce, + "FakeBackupResource1.postRecovery was called once" + ); + Assert.ok( + FakeBackupResource2.prototype.postRecovery.notCalled, + "FakeBackupResource2.postRecovery was not called" + ); + Assert.ok( + FakeBackupResource3.prototype.postRecovery.calledOnce, + "FakeBackupResource3.postRecovery was called once" + ); + Assert.ok( + FakeBackupResource1.prototype.postRecovery.calledWith( + fakePostRecoveryObject[FakeBackupResource1.key] + ), + "FakeBackupResource1.postRecovery was called with the expected argument" + ); + Assert.ok( + FakeBackupResource3.prototype.postRecovery.calledWith( + fakePostRecoveryObject[FakeBackupResource3.key] + ), + "FakeBackupResource3.postRecovery was called with the expected argument" + ); + + await IOUtils.remove(testProfilePath, { recursive: true }); + sandbox.restore(); +}); diff --git a/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js new file mode 100644 index 0000000000..c73482dfe6 --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_BackupService_takeMeasurements.js @@ -0,0 +1,59 @@ +/* Any copyright is dedicated to the Public Domain. +http://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +add_setup(() => { + // FOG needs to be initialized in order for data to flow. + Services.fog.initializeFOG(); + Services.telemetry.clearScalars(); +}); + +/** + * Tests that calling `BackupService.takeMeasurements` will call the measure + * method of all registered BackupResource classes. 
+ */ +add_task(async function test_takeMeasurements() { + let sandbox = sinon.createSandbox(); + sandbox.stub(FakeBackupResource1.prototype, "measure").resolves(); + sandbox + .stub(FakeBackupResource2.prototype, "measure") + .rejects(new Error("Some failure to measure")); + + let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 }); + await bs.takeMeasurements(); + + for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) { + Assert.ok( + backupResourceClass.prototype.measure.calledOnce, + "Measure was called" + ); + Assert.ok( + backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir), + "Measure was called with the profile directory argument" + ); + } + + sandbox.restore(); +}); + +/** + * Tests that we can measure the disk space available in the profile directory. + */ +add_task(async function test_profDDiskSpace() { + let bs = new BackupService(); + await bs.takeMeasurements(); + let measurement = Glean.browserBackup.profDDiskSpace.testGetValue(); + TelemetryTestUtils.assertScalar( + TelemetryTestUtils.getProcessScalars("parent", false, true), + "browser.backup.prof_d_disk_space", + measurement + ); + + Assert.greater( + measurement, + 0, + "Should have collected a measurement for the profile directory storage " + + "device" + ); +}); diff --git a/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js new file mode 100644 index 0000000000..1690580437 --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_CookiesBackupResource.js @@ -0,0 +1,142 @@ +/* Any copyright is dedicated to the Public Domain. +https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { CookiesBackupResource } = ChromeUtils.importESModule( + "resource:///modules/backup/CookiesBackupResource.sys.mjs" +); + +/** + * Tests that we can measure the Cookies db in a profile directory. + */ +add_task(async function test_measure() { + const EXPECTED_COOKIES_DB_SIZE = 1230; + + Services.fog.testResetFOG(); + + // Create resource files in temporary directory + let tempDir = PathUtils.tempDir; + let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite"); + await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE); + + let cookiesBackupResource = new CookiesBackupResource(); + await cookiesBackupResource.measure(tempDir); + + let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue(); + let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); + + // Compare glean vs telemetry measurements + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.cookies_size", + cookiesMeasurement, + "Glean and telemetry measurements for cookies.sqlite should be equal" + ); + + // Compare glean measurements vs actual file sizes + Assert.equal( + cookiesMeasurement, + EXPECTED_COOKIES_DB_SIZE, + "Should have collected the correct glean measurement for cookies.sqlite" + ); + + await maybeRemovePath(tempCookiesDBPath); +}); + +/** + * Test that the backup method correctly copies items from the profile directory + * into the staging directory. 
+ */ +add_task(async function test_backup() { + let sandbox = sinon.createSandbox(); + + let cookiesBackupResource = new CookiesBackupResource(); + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CookiesBackupResource-source-test" + ); + let stagingPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CookiesBackupResource-staging-test" + ); + + // Make sure this file exists in the source directory, otherwise + // BackupResource will skip attempting to back it up. + await createTestFiles(sourcePath, [{ path: "cookies.sqlite" }]); + + // We have no need to test that Sqlite.sys.mjs's backup method is working - + // this is something that is tested in Sqlite's own tests. We can just make + // sure that it's being called using sinon. Unfortunately, we cannot do the + // same thing with IOUtils.copy, as its methods are not stubbable. + let fakeConnection = { + backup: sandbox.stub().resolves(true), + close: sandbox.stub().resolves(true), + }; + sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); + + let manifestEntry = await cookiesBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "CookiesBackupResource.backup should return null as its ManifestEntry" + ); + + // Next, we'll make sure that the Sqlite connection had `backup` called on it + // with the right arguments. + Assert.ok( + fakeConnection.backup.calledOnce, + "Called backup the expected number of times for all connections" + ); + Assert.ok( + fakeConnection.backup.calledWith( + PathUtils.join(stagingPath, "cookies.sqlite") + ), + "Called backup on the cookies.sqlite Sqlite connection" + ); + + await maybeRemovePath(stagingPath); + await maybeRemovePath(sourcePath); + + sandbox.restore(); +}); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover() { + let cookiesBackupResource = new CookiesBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CookiesBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CookiesBackupResource-test-profile" + ); + + const simpleCopyFiles = [{ path: "cookies.sqlite" }]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await cookiesBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "CookiesBackupResource.recover should return null as its post " + + "recovery entry" + ); + + await assertFilesExist(destProfilePath, simpleCopyFiles); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js new file mode 100644 index 0000000000..f53fec8d3f --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_CredentialsAndSecurityBackupResource.js @@ -0,0 +1,215 @@ +/* Any copyright is dedicated to the Public Domain. 
+https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule( + "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs" +); + +/** + * Tests that we can measure credentials related files in the profile directory. + */ +add_task(async function test_measure() { + Services.fog.testResetFOG(); + + const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413; + const EXPECTED_SECURITY_KILOBYTES_SIZE = 231; + + // Create resource files in temporary directory + const tempDir = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CredentialsAndSecurityBackupResource-measurement-test" + ); + + const mockFiles = [ + // Set up credentials files + { path: "key4.db", sizeInKB: 300 }, + { path: "logins.json", sizeInKB: 1 }, + { path: "logins-backup.json", sizeInKB: 1 }, + { path: "autofill-profiles.json", sizeInKB: 1 }, + { path: "credentialstate.sqlite", sizeInKB: 100 }, + { path: "signedInUser.json", sizeInKB: 5 }, + // Set up security files + { path: "cert9.db", sizeInKB: 230 }, + { path: "pkcs11.txt", sizeInKB: 1 }, + ]; + + await createTestFiles(tempDir, mockFiles); + + let credentialsAndSecurityBackupResource = + new CredentialsAndSecurityBackupResource(); + await credentialsAndSecurityBackupResource.measure(tempDir); + + let credentialsMeasurement = + Glean.browserBackup.credentialsDataSize.testGetValue(); + let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue(); + let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); + + // Credentials measurements + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.credentials_data_size", + credentialsMeasurement, + "Glean and telemetry measurements for credentials data should be equal" + ); + + Assert.equal( + credentialsMeasurement, + EXPECTED_CREDENTIALS_KILOBYTES_SIZE, + "Should have collected the correct glean measurement for credentials files" + ); + + // Security measurements + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.security_data_size", + securityMeasurement, + "Glean and telemetry measurements for security data should be equal" + ); + Assert.equal( + securityMeasurement, + EXPECTED_SECURITY_KILOBYTES_SIZE, + "Should have collected the correct glean measurement for security files" + ); + + // Cleanup + await maybeRemovePath(tempDir); +}); + +/** + * Test that the backup method correctly copies items from the profile directory + * into the staging directory. + */ +add_task(async function test_backup() { + let sandbox = sinon.createSandbox(); + + let credentialsAndSecurityBackupResource = + new CredentialsAndSecurityBackupResource(); + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CredentialsAndSecurityBackupResource-source-test" + ); + let stagingPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CredentialsAndSecurityBackupResource-staging-test" + ); + + const simpleCopyFiles = [ + { path: "logins.json", sizeInKB: 1 }, + { path: "logins-backup.json", sizeInKB: 1 }, + { path: "autofill-profiles.json", sizeInKB: 1 }, + { path: "signedInUser.json", sizeInKB: 5 }, + { path: "pkcs11.txt", sizeInKB: 1 }, + ]; + await createTestFiles(sourcePath, simpleCopyFiles); + + // Create our fake database files. We don't expect these to be copied to the + // staging directory in this test due to our stubbing of the backup method, so + // we don't include it in `simpleCopyFiles`. 
+ await createTestFiles(sourcePath, [ + { path: "cert9.db" }, + { path: "key4.db" }, + { path: "credentialstate.sqlite" }, + ]); + + // We have no need to test that Sqlite.sys.mjs's backup method is working - + // this is something that is tested in Sqlite's own tests. We can just make + // sure that it's being called using sinon. Unfortunately, we cannot do the + // same thing with IOUtils.copy, as its methods are not stubbable. + let fakeConnection = { + backup: sandbox.stub().resolves(true), + close: sandbox.stub().resolves(true), + }; + sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); + + let manifestEntry = await credentialsAndSecurityBackupResource.backup( + stagingPath, + sourcePath + ); + + Assert.equal( + manifestEntry, + null, + "CredentialsAndSecurityBackupResource.backup should return null as its ManifestEntry" + ); + + await assertFilesExist(stagingPath, simpleCopyFiles); + + // Next, we'll make sure that the Sqlite connection had `backup` called on it + // with the right arguments. + Assert.ok( + fakeConnection.backup.calledThrice, + "Called backup the expected number of times for all connections" + ); + Assert.ok( + fakeConnection.backup.firstCall.calledWith( + PathUtils.join(stagingPath, "cert9.db") + ), + "Called backup on cert9.db connection first" + ); + Assert.ok( + fakeConnection.backup.secondCall.calledWith( + PathUtils.join(stagingPath, "key4.db") + ), + "Called backup on key4.db connection second" + ); + Assert.ok( + fakeConnection.backup.thirdCall.calledWith( + PathUtils.join(stagingPath, "credentialstate.sqlite") + ), + "Called backup on credentialstate.sqlite connection third" + ); + + await maybeRemovePath(stagingPath); + await maybeRemovePath(sourcePath); + + sandbox.restore(); +}); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover() { + let credentialsAndSecurityBackupResource = + new CredentialsAndSecurityBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CredentialsAndSecurityBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "CredentialsAndSecurityBackupResource-test-profile" + ); + + const files = [ + { path: "logins.json" }, + { path: "logins-backup.json" }, + { path: "autofill-profiles.json" }, + { path: "credentialstate.sqlite" }, + { path: "signedInUser.json" }, + { path: "cert9.db" }, + { path: "key4.db" }, + { path: "pkcs11.txt" }, + ]; + await createTestFiles(recoveryPath, files); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await credentialsAndSecurityBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "CredentialsAndSecurityBackupResource.recover should return null as its post " + + "recovery entry" + ); + + await assertFilesExist(destProfilePath, files); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js new file mode 100644 index 0000000000..93434daa9c --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_FormHistoryBackupResource.js @@ -0,0 +1,146 @@ +/* Any copyright is dedicated to the Public Domain. 
+https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { FormHistoryBackupResource } = ChromeUtils.importESModule( + "resource:///modules/backup/FormHistoryBackupResource.sys.mjs" +); + +/** + * Tests that we can measure the Form History db in a profile directory. + */ +add_task(async function test_measure() { + const EXPECTED_FORM_HISTORY_DB_SIZE = 500; + + Services.fog.testResetFOG(); + + // Create resource files in temporary directory + let tempDir = PathUtils.tempDir; + let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite"); + await createKilobyteSizedFile( + tempFormHistoryDBPath, + EXPECTED_FORM_HISTORY_DB_SIZE + ); + + let formHistoryBackupResource = new FormHistoryBackupResource(); + await formHistoryBackupResource.measure(tempDir); + + let formHistoryMeasurement = + Glean.browserBackup.formHistorySize.testGetValue(); + let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); + + // Compare glean vs telemetry measurements + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.form_history_size", + formHistoryMeasurement, + "Glean and telemetry measurements for formhistory.sqlite should be equal" + ); + + // Compare glean measurements vs actual file sizes + Assert.equal( + formHistoryMeasurement, + EXPECTED_FORM_HISTORY_DB_SIZE, + "Should have collected the correct glean measurement for formhistory.sqlite" + ); + + await IOUtils.remove(tempFormHistoryDBPath); +}); + +/** + * Test that the backup method correctly copies items from the profile directory + * into the staging directory. + */ +add_task(async function test_backup() { + let sandbox = sinon.createSandbox(); + + let formHistoryBackupResource = new FormHistoryBackupResource(); + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "FormHistoryBackupResource-source-test" + ); + let stagingPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "FormHistoryBackupResource-staging-test" + ); + + // Make sure this file exists in the source directory, otherwise + // BackupResource will skip attempting to back it up. + await createTestFiles(sourcePath, [{ path: "formhistory.sqlite" }]); + + // We have no need to test that Sqlite.sys.mjs's backup method is working - + // this is something that is tested in Sqlite's own tests. We can just make + // sure that it's being called using sinon. Unfortunately, we cannot do the + // same thing with IOUtils.copy, as its methods are not stubbable. + let fakeConnection = { + backup: sandbox.stub().resolves(true), + close: sandbox.stub().resolves(true), + }; + sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); + + let manifestEntry = await formHistoryBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "FormHistoryBackupResource.backup should return null as its ManifestEntry" + ); + + // Next, we'll make sure that the Sqlite connection had `backup` called on it + // with the right arguments. + Assert.ok( + fakeConnection.backup.calledOnce, + "Called backup the expected number of times for all connections" + ); + Assert.ok( + fakeConnection.backup.calledWith( + PathUtils.join(stagingPath, "formhistory.sqlite") + ), + "Called backup on the formhistory.sqlite Sqlite connection" + ); + + await maybeRemovePath(stagingPath); + await maybeRemovePath(sourcePath); + + sandbox.restore(); +}); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. 
+ */ +add_task(async function test_recover() { + let formHistoryBackupResource = new FormHistoryBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "FormHistoryBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "FormHistoryBackupResource-test-profile" + ); + + const simpleCopyFiles = [{ path: "formhistory.sqlite" }]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await formHistoryBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "FormHistoryBackupResource.recover should return null as its post " + + "recovery entry" + ); + + await assertFilesExist(destProfilePath, simpleCopyFiles); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js index e57dd50cd3..ab63b65332 100644 --- a/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js +++ b/browser/components/backup/tests/xpcshell/test_MiscDataBackupResource.js @@ -7,20 +7,27 @@ const { MiscDataBackupResource } = ChromeUtils.importESModule( "resource:///modules/backup/MiscDataBackupResource.sys.mjs" ); +const { ActivityStreamStorage } = ChromeUtils.importESModule( + "resource://activity-stream/lib/ActivityStreamStorage.sys.mjs" +); + +const { ProfileAge } = ChromeUtils.importESModule( + "resource://gre/modules/ProfileAge.sys.mjs" +); + /** * Tests that we can measure miscellaneous files in the profile directory. */ add_task(async function test_measure() { Services.fog.testResetFOG(); - const EXPECTED_MISC_KILOBYTES_SIZE = 241; + const EXPECTED_MISC_KILOBYTES_SIZE = 231; const tempDir = await IOUtils.createUniqueDirectory( PathUtils.tempDir, "MiscDataBackupResource-measurement-test" ); const mockFiles = [ - { path: "times.json", sizeInKB: 5 }, { path: "enumerate_devices.txt", sizeInKB: 1 }, { path: "protections.sqlite", sizeInKB: 100 }, { path: "SiteSecurityServiceState.bin", sizeInKB: 10 }, @@ -69,12 +76,16 @@ add_task(async function test_backup() { ); const simpleCopyFiles = [ - { path: "times.json" }, { path: "enumerate_devices.txt" }, { path: "SiteSecurityServiceState.bin" }, ]; await createTestFiles(sourcePath, simpleCopyFiles); + // Create our fake database files. We don't expect this to be copied to the + // staging directory in this test due to our stubbing of the backup method, so + // we don't include it in `simpleCopyFiles`. + await createTestFiles(sourcePath, [{ path: "protections.sqlite" }]); + // We have no need to test that Sqlite.sys.mjs's backup method is working - // this is something that is tested in Sqlite's own tests. We can just make // sure that it's being called using sinon. 
Unfortunately, we cannot do the @@ -85,7 +96,27 @@ add_task(async function test_backup() { }; sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); - await miscDataBackupResource.backup(stagingPath, sourcePath); + let snippetsTableStub = { + getAllKeys: sandbox.stub().resolves(["key1", "key2"]), + get: sandbox.stub().callsFake(key => { + return { key: `value for ${key}` }; + }), + }; + + sandbox + .stub(ActivityStreamStorage.prototype, "getDbTable") + .withArgs("snippets") + .resolves(snippetsTableStub); + + let manifestEntry = await miscDataBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "MiscDataBackupResource.backup should return null as its ManifestEntry" + ); await assertFilesExist(stagingPath, simpleCopyFiles); @@ -102,12 +133,170 @@ add_task(async function test_backup() { "Called backup on the protections.sqlite Sqlite connection" ); - // Bug 1890585 - we don't currently have the ability to copy the - // chrome-privileged IndexedDB databases under storage/permanent/chrome, so - // we'll just skip testing that for now. + // Bug 1890585 - we don't currently have the generalized ability to copy the + // chrome-privileged IndexedDB databases under storage/permanent/chrome, but + // we do support copying individual IndexedDB databases by manually exporting + // and re-importing their contents. + let snippetsBackupPath = PathUtils.join( + stagingPath, + "activity-stream-snippets.json" + ); + Assert.ok( + await IOUtils.exists(snippetsBackupPath), + "The activity-stream-snippets.json file should exist" + ); + let snippetsBackupContents = await IOUtils.readJSON(snippetsBackupPath); + Assert.deepEqual( + snippetsBackupContents, + { + key1: { key: "value for key1" }, + key2: { key: "value for key2" }, + }, + "The contents of the activity-stream-snippets.json file should be as expected" + ); await maybeRemovePath(stagingPath); await maybeRemovePath(sourcePath); sandbox.restore(); }); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover() { + let miscBackupResource = new MiscDataBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "MiscDataBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "MiscDataBackupResource-test-profile" + ); + + // Write a dummy times.json into the xpcshell test profile directory. We + // expect it to be copied into the destination profile. + let originalProfileAge = await ProfileAge(PathUtils.profileDir); + await originalProfileAge.computeAndPersistCreated(); + Assert.ok( + await IOUtils.exists(PathUtils.join(PathUtils.profileDir, "times.json")) + ); + + const simpleCopyFiles = [ + { path: "enumerate_devices.txt" }, + { path: "protections.sqlite" }, + { path: "SiteSecurityServiceState.bin" }, + ]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + const SNIPPETS_BACKUP_FILE = "activity-stream-snippets.json"; + + // We'll also separately create the activity-stream-snippets.json file, which + // is not expected to be copied into the profile directory, but is expected + // to exist in the recovery path. 
+ await createTestFiles(recoveryPath, [{ path: SNIPPETS_BACKUP_FILE }]); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await miscBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.deepEqual( + postRecoveryEntry, + { + snippetsBackupFile: PathUtils.join(recoveryPath, SNIPPETS_BACKUP_FILE), + }, + "MiscDataBackupResource.recover should return the snippets backup data " + + "path as its post recovery entry" + ); + + await assertFilesExist(destProfilePath, simpleCopyFiles); + + // The activity-stream-snippets.json path should _not_ have been written to + // the profile path. + Assert.ok( + !(await IOUtils.exists( + PathUtils.join(destProfilePath, SNIPPETS_BACKUP_FILE) + )), + "Snippets backup data should not have gone into the profile directory" + ); + + // The times.json file should have been copied over and a backup recovery + // time written into it. + Assert.ok( + await IOUtils.exists(PathUtils.join(destProfilePath, "times.json")) + ); + let copiedProfileAge = await ProfileAge(destProfilePath); + Assert.equal( + await originalProfileAge.created, + await copiedProfileAge.created, + "Created timestamp should match." + ); + Assert.equal( + await originalProfileAge.firstUse, + await copiedProfileAge.firstUse, + "First use timestamp should match." + ); + Assert.ok( + await copiedProfileAge.recoveredFromBackup, + "Backup recovery timestamp should have been set." + ); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); + +/** + * Test that the postRecovery method correctly writes the snippets backup data + * into the snippets IndexedDB table. + */ +add_task(async function test_postRecovery() { + let sandbox = sinon.createSandbox(); + + let fakeProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "MiscDataBackupResource-test-profile" + ); + let fakeSnippetsData = { + key1: "value1", + key2: "value2", + }; + const SNIPPEST_BACKUP_FILE = PathUtils.join( + fakeProfilePath, + "activity-stream-snippets.json" + ); + + await IOUtils.writeJSON(SNIPPEST_BACKUP_FILE, fakeSnippetsData); + + let snippetsTableStub = { + set: sandbox.stub(), + }; + + sandbox + .stub(ActivityStreamStorage.prototype, "getDbTable") + .withArgs("snippets") + .resolves(snippetsTableStub); + + let miscBackupResource = new MiscDataBackupResource(); + await miscBackupResource.postRecovery({ + snippetsBackupFile: SNIPPEST_BACKUP_FILE, + }); + + Assert.ok( + snippetsTableStub.set.calledTwice, + "The snippets table's set method was called twice" + ); + Assert.ok( + snippetsTableStub.set.firstCall.calledWith("key1", "value1"), + "The snippets table's set method was called with the first key-value pair" + ); + Assert.ok( + snippetsTableStub.set.secondCall.calledWith("key2", "value2"), + "The snippets table's set method was called with the second key-value pair" + ); + + sandbox.restore(); +}); diff --git a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js index de97281372..7248a5c614 100644 --- a/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js +++ b/browser/components/backup/tests/xpcshell/test_PlacesBackupResource.js @@ -3,6 +3,9 @@ https://creativecommons.org/publicdomain/zero/1.0/ */ "use strict"; +const { BookmarkJSONUtils } = ChromeUtils.importESModule( + "resource://gre/modules/BookmarkJSONUtils.sys.mjs" +); const { PlacesBackupResource } = 
ChromeUtils.importESModule( "resource:///modules/backup/PlacesBackupResource.sys.mjs" ); @@ -93,13 +96,28 @@ add_task(async function test_backup() { "PlacesBackupResource-staging-test" ); + // Make sure these files exist in the source directory, otherwise + // BackupResource will skip attempting to back them up. + await createTestFiles(sourcePath, [ + { path: "places.sqlite" }, + { path: "favicons.sqlite" }, + ]); + let fakeConnection = { backup: sandbox.stub().resolves(true), close: sandbox.stub().resolves(true), }; sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); - await placesBackupResource.backup(stagingPath, sourcePath); + let manifestEntry = await placesBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "PlacesBackupResource.backup should return null as its ManifestEntry" + ); Assert.ok( fakeConnection.backup.calledTwice, @@ -154,7 +172,16 @@ add_task(async function test_backup_no_saved_history() { Services.prefs.setBoolPref(HISTORY_ENABLED_PREF, false); Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, false); - await placesBackupResource.backup(stagingPath, sourcePath); + let manifestEntry = await placesBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.deepEqual( + manifestEntry, + { bookmarksOnly: true }, + "Should have gotten back a ManifestEntry indicating that we only copied " + + "bookmarks" + ); Assert.ok( fakeConnection.backup.notCalled, @@ -171,7 +198,13 @@ add_task(async function test_backup_no_saved_history() { Services.prefs.setBoolPref(SANITIZE_ON_SHUTDOWN_PREF, true); fakeConnection.backup.resetHistory(); - await placesBackupResource.backup(stagingPath, sourcePath); + manifestEntry = await placesBackupResource.backup(stagingPath, sourcePath); + Assert.deepEqual( + manifestEntry, + { bookmarksOnly: true }, + "Should have gotten back a ManifestEntry indicating that we only copied " + + "bookmarks" + ); Assert.ok( fakeConnection.backup.notCalled, @@ -211,7 +244,16 @@ add_task(async function test_backup_private_browsing() { sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); sandbox.stub(PrivateBrowsingUtils, "permanentPrivateBrowsing").value(true); - await placesBackupResource.backup(stagingPath, sourcePath); + let manifestEntry = await placesBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.deepEqual( + manifestEntry, + { bookmarksOnly: true }, + "Should have gotten back a ManifestEntry indicating that we only copied " + + "bookmarks" + ); Assert.ok( fakeConnection.backup.notCalled, @@ -224,3 +266,104 @@ add_task(async function test_backup_private_browsing() { sandbox.restore(); }); + +/** + * Test that the recover method correctly copies places.sqlite and favicons.sqlite + * from the recovery directory into the destination profile directory. 
+ */ +add_task(async function test_recover() { + let placesBackupResource = new PlacesBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PlacesBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PlacesBackupResource-test-profile" + ); + + const simpleCopyFiles = [ + { path: "places.sqlite" }, + { path: "favicons.sqlite" }, + ]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await placesBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "PlacesBackupResource.recover should return null as its post recovery entry" + ); + + await assertFilesExist(destProfilePath, simpleCopyFiles); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); + +/** + * Test that the recover method correctly copies bookmarks.jsonlz4 from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover_bookmarks_only() { + let sandbox = sinon.createSandbox(); + let placesBackupResource = new PlacesBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PlacesBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PlacesBackupResource-test-profile" + ); + let bookmarksImportStub = sandbox + .stub(BookmarkJSONUtils, "importFromFile") + .resolves(true); + + await createTestFiles(recoveryPath, [{ path: "bookmarks.jsonlz4" }]); + + // The backup method is expected to detect bookmarks import only + let postRecoveryEntry = await placesBackupResource.recover( + { bookmarksOnly: true }, + recoveryPath, + destProfilePath + ); + + let expectedBookmarksPath = PathUtils.join(recoveryPath, "bookmarks.jsonlz4"); + + // Expect the bookmarks backup file path to be passed from recover() + Assert.deepEqual( + postRecoveryEntry, + { bookmarksBackupPath: expectedBookmarksPath }, + "PlacesBackupResource.recover should return the expected post recovery entry" + ); + + // Ensure that files stored in a places backup are not copied to the new profile during recovery + for (let placesFile of [ + "places.sqlite", + "favicons.sqlite", + "bookmarks.jsonlz4", + ]) { + Assert.ok( + !(await IOUtils.exists(PathUtils.join(destProfilePath, placesFile))), + `${placesFile} should not exist in the new profile` + ); + } + + // Now pretend that BackupService called the postRecovery method + await placesBackupResource.postRecovery(postRecoveryEntry); + Assert.ok( + bookmarksImportStub.calledOnce, + "BookmarkJSONUtils.importFromFile was called in the postRecovery step" + ); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); + + sandbox.restore(); +}); diff --git a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js index 6845431bb8..2075b57e91 100644 --- a/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js +++ b/browser/components/backup/tests/xpcshell/test_PreferencesBackupResource.js @@ -86,6 +86,14 @@ add_task(async function test_backup() { ]; await createTestFiles(sourcePath, simpleCopyFiles); + // Create our fake database files. 
We don't expect these to be copied to the + // staging directory in this test due to our stubbing of the backup method, so + // we don't include it in `simpleCopyFiles`. + await createTestFiles(sourcePath, [ + { path: "permissions.sqlite" }, + { path: "content-prefs.sqlite" }, + ]); + // We have no need to test that Sqlite.sys.mjs's backup method is working - // this is something that is tested in Sqlite's own tests. We can just make // sure that it's being called using sinon. Unfortunately, we cannot do the @@ -96,7 +104,15 @@ add_task(async function test_backup() { }; sandbox.stub(Sqlite, "openConnection").returns(fakeConnection); - await preferencesBackupResource.backup(stagingPath, sourcePath); + let manifestEntry = await preferencesBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "PreferencesBackupResource.backup should return null as its ManifestEntry" + ); await assertFilesExist(stagingPath, simpleCopyFiles); @@ -130,3 +146,51 @@ add_task(async function test_backup() { sandbox.restore(); }); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover() { + let preferencesBackupResource = new PreferencesBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PreferencesBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "PreferencesBackupResource-test-profile" + ); + + const simpleCopyFiles = [ + { path: "prefs.js" }, + { path: "xulstore.json" }, + { path: "permissions.sqlite" }, + { path: "content-prefs.sqlite" }, + { path: "containers.json" }, + { path: "handlers.json" }, + { path: "search.json.mozlz4" }, + { path: "user.js" }, + { path: ["chrome", "userChrome.css"] }, + { path: ["chrome", "userContent.css"] }, + { path: ["chrome", "childFolder", "someOtherStylesheet.css"] }, + ]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await preferencesBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "PreferencesBackupResource.recover should return null as its post recovery entry" + ); + + await assertFilesExist(destProfilePath, simpleCopyFiles); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js new file mode 100644 index 0000000000..d57f2d3a25 --- /dev/null +++ b/browser/components/backup/tests/xpcshell/test_SessionStoreBackupResource.js @@ -0,0 +1,209 @@ +/* Any copyright is dedicated to the Public Domain. +https://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { SessionStoreBackupResource } = ChromeUtils.importESModule( + "resource:///modules/backup/SessionStoreBackupResource.sys.mjs" +); +const { SessionStore } = ChromeUtils.importESModule( + "resource:///modules/sessionstore/SessionStore.sys.mjs" +); + +/** + * Tests that we can measure the Session Store JSON and backups directory. + */ +add_task(async function test_measure() { + const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000; + Services.fog.testResetFOG(); + + // Create the sessionstore-backups directory. 
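createKilobyteSizedFile is another head.js helper this patch relies on but does not show; presumably it writes a file of exactly the requested size so that the size-based Glean measurements come out deterministic. A minimal sketch under that assumption:

// Sketch only - the actual helper lives in head.js. Writes `sizeInKB`
// kilobytes of zeroes to `path`, creating missing parent directories first.
async function createKilobyteSizedFile(path, sizeInKB) {
  await IOUtils.makeDirectory(PathUtils.parent(path), {
    createAncestors: true,
    ignoreExisting: true,
  });
  await IOUtils.write(path, new Uint8Array(sizeInKB * 1024));
}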
+ let tempDir = PathUtils.tempDir; + let sessionStoreBackupsPath = PathUtils.join( + tempDir, + "sessionstore-backups", + "restore.jsonlz4" + ); + await createKilobyteSizedFile( + sessionStoreBackupsPath, + EXPECTED_KILOBYTES_FOR_BACKUPS_DIR + ); + + let sessionStoreBackupResource = new SessionStoreBackupResource(); + await sessionStoreBackupResource.measure(tempDir); + + let sessionStoreBackupsDirectoryMeasurement = + Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue(); + let sessionStoreMeasurement = + Glean.browserBackup.sessionStoreSize.testGetValue(); + let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); + + // Compare glean vs telemetry measurements + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.session_store_backups_directory_size", + sessionStoreBackupsDirectoryMeasurement, + "Glean and telemetry measurements for session store backups directory should be equal" + ); + TelemetryTestUtils.assertScalar( + scalars, + "browser.backup.session_store_size", + sessionStoreMeasurement, + "Glean and telemetry measurements for session store should be equal" + ); + + // Compare glean measurements vs actual file sizes + Assert.equal( + sessionStoreBackupsDirectoryMeasurement, + EXPECTED_KILOBYTES_FOR_BACKUPS_DIR, + "Should have collected the correct glean measurement for the sessionstore-backups directory" + ); + + // Session store measurement is from `getCurrentState`, so exact size is unknown. + Assert.greater( + sessionStoreMeasurement, + 0, + "Should have collected a measurement for the session store" + ); + + await IOUtils.remove(sessionStoreBackupsPath); +}); + +/** + * Test that the backup method correctly copies items from the profile directory + * into the staging directory. + */ +add_task(async function test_backup() { + let sandbox = sinon.createSandbox(); + + let sessionStoreBackupResource = new SessionStoreBackupResource(); + let sourcePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "SessionStoreBackupResource-source-test" + ); + let stagingPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "SessionStoreBackupResource-staging-test" + ); + + const simpleCopyFiles = [ + { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] }, + { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] }, + ]; + await createTestFiles(sourcePath, simpleCopyFiles); + + let sessionStoreState = SessionStore.getCurrentState(true); + let manifestEntry = await sessionStoreBackupResource.backup( + stagingPath, + sourcePath + ); + Assert.equal( + manifestEntry, + null, + "SessionStoreBackupResource.backup should return null as its ManifestEntry" + ); + + /** + * We don't expect the actual file sessionstore.jsonlz4 to exist in the profile directory before calling the backup method. + * Instead, verify that it is created by the backup method and exists in the staging folder right after. + */ + await assertFilesExist(stagingPath, [ + ...simpleCopyFiles, + { path: "sessionstore.jsonlz4" }, + ]); + + /** + * Do a deep comparison between the recorded session state before backup and the file made in the staging folder + * to verify that information about session state was correctly written for backup. + */ + let sessionStoreStateStaged = await IOUtils.readJSON( + PathUtils.join(stagingPath, "sessionstore.jsonlz4"), + { decompress: true } + ); + + /** + * These timestamps might be slightly different from one another, so we'll exclude + * them from the comparison. 
+ */ + delete sessionStoreStateStaged.session.lastUpdate; + delete sessionStoreState.session.lastUpdate; + Assert.deepEqual( + sessionStoreStateStaged, + sessionStoreState, + "sessionstore.jsonlz4 in the staging folder matches the recorded session state" + ); + + await maybeRemovePath(stagingPath); + await maybeRemovePath(sourcePath); + + sandbox.restore(); +}); + +/** + * Test that the recover method correctly copies items from the recovery + * directory into the destination profile directory. + */ +add_task(async function test_recover() { + let sessionStoreBackupResource = new SessionStoreBackupResource(); + let recoveryPath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "SessionStoreBackupResource-recovery-test" + ); + let destProfilePath = await IOUtils.createUniqueDirectory( + PathUtils.tempDir, + "SessionStoreBackupResource-test-profile" + ); + + const simpleCopyFiles = [ + { path: ["sessionstore-backups", "test-sessionstore-backup.jsonlz4"] }, + { path: ["sessionstore-backups", "test-sessionstore-recovery.baklz4"] }, + ]; + await createTestFiles(recoveryPath, simpleCopyFiles); + + // We backup a copy of sessionstore.jsonlz4, so ensure it exists in the recovery path + let sessionStoreState = SessionStore.getCurrentState(true); + let sessionStoreBackupPath = PathUtils.join( + recoveryPath, + "sessionstore.jsonlz4" + ); + await IOUtils.writeJSON(sessionStoreBackupPath, sessionStoreState, { + compress: true, + }); + + // The backup method is expected to have returned a null ManifestEntry + let postRecoveryEntry = await sessionStoreBackupResource.recover( + null /* manifestEntry */, + recoveryPath, + destProfilePath + ); + Assert.equal( + postRecoveryEntry, + null, + "SessionStoreBackupResource.recover should return null as its post recovery entry" + ); + + await assertFilesExist(destProfilePath, [ + ...simpleCopyFiles, + { path: "sessionstore.jsonlz4" }, + ]); + + let sessionStateCopied = await IOUtils.readJSON( + PathUtils.join(destProfilePath, "sessionstore.jsonlz4"), + { decompress: true } + ); + + /** + * These timestamps might be slightly different from one another, so we'll exclude + * them from the comparison. + */ + delete sessionStateCopied.session.lastUpdate; + delete sessionStoreState.session.lastUpdate; + Assert.deepEqual( + sessionStateCopied, + sessionStoreState, + "sessionstore.jsonlz4 in the destination profile folder matches the backed up session state" + ); + + await maybeRemovePath(recoveryPath); + await maybeRemovePath(destProfilePath); +}); diff --git a/browser/components/backup/tests/xpcshell/test_createBackup.js b/browser/components/backup/tests/xpcshell/test_createBackup.js deleted file mode 100644 index fcace695ef..0000000000 --- a/browser/components/backup/tests/xpcshell/test_createBackup.js +++ /dev/null @@ -1,74 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. -https://creativecommons.org/publicdomain/zero/1.0/ */ - -"use strict"; - -/** - * Tests that calling BackupService.createBackup will call backup on each - * registered BackupResource, and that each BackupResource will have a folder - * created for them to write into. 
- */ -add_task(async function test_createBackup() { - let sandbox = sinon.createSandbox(); - sandbox - .stub(FakeBackupResource1.prototype, "backup") - .resolves({ fake1: "hello from 1" }); - sandbox - .stub(FakeBackupResource2.prototype, "backup") - .rejects(new Error("Some failure to backup")); - sandbox - .stub(FakeBackupResource3.prototype, "backup") - .resolves({ fake3: "hello from 3" }); - - let bs = new BackupService({ - FakeBackupResource1, - FakeBackupResource2, - FakeBackupResource3, - }); - - let fakeProfilePath = await IOUtils.createUniqueDirectory( - PathUtils.tempDir, - "createBackupTest" - ); - - await bs.createBackup({ profilePath: fakeProfilePath }); - - // For now, we expect a staging folder to exist under the fakeProfilePath, - // and we should find a folder for each fake BackupResource. - let stagingPath = PathUtils.join(fakeProfilePath, "backups", "staging"); - Assert.ok(await IOUtils.exists(stagingPath), "Staging folder exists"); - - for (let backupResourceClass of [ - FakeBackupResource1, - FakeBackupResource2, - FakeBackupResource3, - ]) { - let expectedResourceFolder = PathUtils.join( - stagingPath, - backupResourceClass.key - ); - Assert.ok( - await IOUtils.exists(expectedResourceFolder), - `BackupResource staging folder exists for ${backupResourceClass.key}` - ); - Assert.ok( - backupResourceClass.prototype.backup.calledOnce, - `Backup was called for ${backupResourceClass.key}` - ); - Assert.ok( - backupResourceClass.prototype.backup.calledWith( - expectedResourceFolder, - fakeProfilePath - ), - `Backup was passed the right paths for ${backupResourceClass.key}` - ); - } - - // After createBackup is more fleshed out, we're going to want to make sure - // that we're writing the manifest file and that it contains the expected - // ManifestEntry objects, and that the staging folder was successfully - // renamed with the current date. - await IOUtils.remove(fakeProfilePath, { recursive: true }); - - sandbox.restore(); -}); diff --git a/browser/components/backup/tests/xpcshell/test_measurements.js b/browser/components/backup/tests/xpcshell/test_measurements.js deleted file mode 100644 index 0dece6b370..0000000000 --- a/browser/components/backup/tests/xpcshell/test_measurements.js +++ /dev/null @@ -1,577 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. -http://creativecommons.org/publicdomain/zero/1.0/ */ - -"use strict"; - -const { CredentialsAndSecurityBackupResource } = ChromeUtils.importESModule( - "resource:///modules/backup/CredentialsAndSecurityBackupResource.sys.mjs" -); -const { AddonsBackupResource } = ChromeUtils.importESModule( - "resource:///modules/backup/AddonsBackupResource.sys.mjs" -); -const { CookiesBackupResource } = ChromeUtils.importESModule( - "resource:///modules/backup/CookiesBackupResource.sys.mjs" -); - -const { FormHistoryBackupResource } = ChromeUtils.importESModule( - "resource:///modules/backup/FormHistoryBackupResource.sys.mjs" -); - -const { SessionStoreBackupResource } = ChromeUtils.importESModule( - "resource:///modules/backup/SessionStoreBackupResource.sys.mjs" -); - -add_setup(() => { - // FOG needs to be initialized in order for data to flow. - Services.fog.initializeFOG(); - Services.telemetry.clearScalars(); -}); - -/** - * Tests that calling `BackupService.takeMeasurements` will call the measure - * method of all registered BackupResource classes. 
- */ -add_task(async function test_takeMeasurements() { - let sandbox = sinon.createSandbox(); - sandbox.stub(FakeBackupResource1.prototype, "measure").resolves(); - sandbox - .stub(FakeBackupResource2.prototype, "measure") - .rejects(new Error("Some failure to measure")); - - let bs = new BackupService({ FakeBackupResource1, FakeBackupResource2 }); - await bs.takeMeasurements(); - - for (let backupResourceClass of [FakeBackupResource1, FakeBackupResource2]) { - Assert.ok( - backupResourceClass.prototype.measure.calledOnce, - "Measure was called" - ); - Assert.ok( - backupResourceClass.prototype.measure.calledWith(PathUtils.profileDir), - "Measure was called with the profile directory argument" - ); - } - - sandbox.restore(); -}); - -/** - * Tests that we can measure the disk space available in the profile directory. - */ -add_task(async function test_profDDiskSpace() { - let bs = new BackupService(); - await bs.takeMeasurements(); - let measurement = Glean.browserBackup.profDDiskSpace.testGetValue(); - TelemetryTestUtils.assertScalar( - TelemetryTestUtils.getProcessScalars("parent", false, true), - "browser.backup.prof_d_disk_space", - measurement - ); - - Assert.greater( - measurement, - 0, - "Should have collected a measurement for the profile directory storage " + - "device" - ); -}); - -/** - * Tests that we can measure credentials related files in the profile directory. - */ -add_task(async function test_credentialsAndSecurityBackupResource() { - Services.fog.testResetFOG(); - - const EXPECTED_CREDENTIALS_KILOBYTES_SIZE = 413; - const EXPECTED_SECURITY_KILOBYTES_SIZE = 231; - - // Create resource files in temporary directory - const tempDir = await IOUtils.createUniqueDirectory( - PathUtils.tempDir, - "CredentialsAndSecurityBackupResource-measurement-test" - ); - - const mockFiles = [ - // Set up credentials files - { path: "key4.db", sizeInKB: 300 }, - { path: "logins.json", sizeInKB: 1 }, - { path: "logins-backup.json", sizeInKB: 1 }, - { path: "autofill-profiles.json", sizeInKB: 1 }, - { path: "credentialstate.sqlite", sizeInKB: 100 }, - { path: "signedInUser.json", sizeInKB: 5 }, - // Set up security files - { path: "cert9.db", sizeInKB: 230 }, - { path: "pkcs11.txt", sizeInKB: 1 }, - ]; - - await createTestFiles(tempDir, mockFiles); - - let credentialsAndSecurityBackupResource = - new CredentialsAndSecurityBackupResource(); - await credentialsAndSecurityBackupResource.measure(tempDir); - - let credentialsMeasurement = - Glean.browserBackup.credentialsDataSize.testGetValue(); - let securityMeasurement = Glean.browserBackup.securityDataSize.testGetValue(); - let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); - - // Credentials measurements - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.credentials_data_size", - credentialsMeasurement, - "Glean and telemetry measurements for credentials data should be equal" - ); - - Assert.equal( - credentialsMeasurement, - EXPECTED_CREDENTIALS_KILOBYTES_SIZE, - "Should have collected the correct glean measurement for credentials files" - ); - - // Security measurements - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.security_data_size", - securityMeasurement, - "Glean and telemetry measurements for security data should be equal" - ); - Assert.equal( - securityMeasurement, - EXPECTED_SECURITY_KILOBYTES_SIZE, - "Should have collected the correct glean measurement for security files" - ); - - // Cleanup - await maybeRemovePath(tempDir); -}); - -/** - * Tests that we can measure the Cookies db 
in a profile directory. - */ -add_task(async function test_cookiesBackupResource() { - const EXPECTED_COOKIES_DB_SIZE = 1230; - - Services.fog.testResetFOG(); - - // Create resource files in temporary directory - let tempDir = PathUtils.tempDir; - let tempCookiesDBPath = PathUtils.join(tempDir, "cookies.sqlite"); - await createKilobyteSizedFile(tempCookiesDBPath, EXPECTED_COOKIES_DB_SIZE); - - let cookiesBackupResource = new CookiesBackupResource(); - await cookiesBackupResource.measure(tempDir); - - let cookiesMeasurement = Glean.browserBackup.cookiesSize.testGetValue(); - let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); - - // Compare glean vs telemetry measurements - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.cookies_size", - cookiesMeasurement, - "Glean and telemetry measurements for cookies.sqlite should be equal" - ); - - // Compare glean measurements vs actual file sizes - Assert.equal( - cookiesMeasurement, - EXPECTED_COOKIES_DB_SIZE, - "Should have collected the correct glean measurement for cookies.sqlite" - ); - - await maybeRemovePath(tempCookiesDBPath); -}); - -/** - * Tests that we can measure the Form History db in a profile directory. - */ -add_task(async function test_formHistoryBackupResource() { - const EXPECTED_FORM_HISTORY_DB_SIZE = 500; - - Services.fog.testResetFOG(); - - // Create resource files in temporary directory - let tempDir = PathUtils.tempDir; - let tempFormHistoryDBPath = PathUtils.join(tempDir, "formhistory.sqlite"); - await createKilobyteSizedFile( - tempFormHistoryDBPath, - EXPECTED_FORM_HISTORY_DB_SIZE - ); - - let formHistoryBackupResource = new FormHistoryBackupResource(); - await formHistoryBackupResource.measure(tempDir); - - let formHistoryMeasurement = - Glean.browserBackup.formHistorySize.testGetValue(); - let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); - - // Compare glean vs telemetry measurements - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.form_history_size", - formHistoryMeasurement, - "Glean and telemetry measurements for formhistory.sqlite should be equal" - ); - - // Compare glean measurements vs actual file sizes - Assert.equal( - formHistoryMeasurement, - EXPECTED_FORM_HISTORY_DB_SIZE, - "Should have collected the correct glean measurement for formhistory.sqlite" - ); - - await IOUtils.remove(tempFormHistoryDBPath); -}); - -/** - * Tests that we can measure the Session Store JSON and backups directory. - */ -add_task(async function test_sessionStoreBackupResource() { - const EXPECTED_KILOBYTES_FOR_BACKUPS_DIR = 1000; - Services.fog.testResetFOG(); - - // Create the sessionstore-backups directory. 
- let tempDir = PathUtils.tempDir; - let sessionStoreBackupsPath = PathUtils.join( - tempDir, - "sessionstore-backups", - "restore.jsonlz4" - ); - await createKilobyteSizedFile( - sessionStoreBackupsPath, - EXPECTED_KILOBYTES_FOR_BACKUPS_DIR - ); - - let sessionStoreBackupResource = new SessionStoreBackupResource(); - await sessionStoreBackupResource.measure(tempDir); - - let sessionStoreBackupsDirectoryMeasurement = - Glean.browserBackup.sessionStoreBackupsDirectorySize.testGetValue(); - let sessionStoreMeasurement = - Glean.browserBackup.sessionStoreSize.testGetValue(); - let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); - - // Compare glean vs telemetry measurements - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.session_store_backups_directory_size", - sessionStoreBackupsDirectoryMeasurement, - "Glean and telemetry measurements for session store backups directory should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.session_store_size", - sessionStoreMeasurement, - "Glean and telemetry measurements for session store should be equal" - ); - - // Compare glean measurements vs actual file sizes - Assert.equal( - sessionStoreBackupsDirectoryMeasurement, - EXPECTED_KILOBYTES_FOR_BACKUPS_DIR, - "Should have collected the correct glean measurement for the sessionstore-backups directory" - ); - - // Session store measurement is from `getCurrentState`, so exact size is unknown. - Assert.greater( - sessionStoreMeasurement, - 0, - "Should have collected a measurement for the session store" - ); - - await IOUtils.remove(sessionStoreBackupsPath); -}); - -/** - * Tests that we can measure the size of all the addons & extensions data. - */ -add_task(async function test_AddonsBackupResource() { - Services.fog.testResetFOG(); - Services.telemetry.clearScalars(); - - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON = 250; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE = 500; - const EXPECTED_KILOBYTES_FOR_STORAGE_SYNC = 50; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A = 600; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B = 400; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C = 150; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY = 1000; - const EXPECTED_KILOBYTES_FOR_EXTENSION_DATA = 100; - const EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE = 200; - - let tempDir = PathUtils.tempDir; - - // Create extensions json files (all the same size). - const extensionsFilePath = PathUtils.join(tempDir, "extensions.json"); - await createKilobyteSizedFile( - extensionsFilePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON - ); - const extensionSettingsFilePath = PathUtils.join( - tempDir, - "extension-settings.json" - ); - await createKilobyteSizedFile( - extensionSettingsFilePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON - ); - const extensionsPrefsFilePath = PathUtils.join( - tempDir, - "extension-preferences.json" - ); - await createKilobyteSizedFile( - extensionsPrefsFilePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON - ); - const addonStartupFilePath = PathUtils.join(tempDir, "addonStartup.json.lz4"); - await createKilobyteSizedFile( - addonStartupFilePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON - ); - - // Create the extension store permissions data file. - let extensionStorePermissionsDataSize = PathUtils.join( - tempDir, - "extension-store-permissions", - "data.safe.bin" - ); - await createKilobyteSizedFile( - extensionStorePermissionsDataSize, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE - ); - - // Create the storage sync database file. 
- let storageSyncPath = PathUtils.join(tempDir, "storage-sync-v2.sqlite"); - await createKilobyteSizedFile( - storageSyncPath, - EXPECTED_KILOBYTES_FOR_STORAGE_SYNC - ); - - // Create the extensions directory with XPI files. - let extensionsXpiAPath = PathUtils.join( - tempDir, - "extensions", - "extension-b.xpi" - ); - let extensionsXpiBPath = PathUtils.join( - tempDir, - "extensions", - "extension-a.xpi" - ); - await createKilobyteSizedFile( - extensionsXpiAPath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_A - ); - await createKilobyteSizedFile( - extensionsXpiBPath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_B - ); - // Should be ignored. - let extensionsXpiStagedPath = PathUtils.join( - tempDir, - "extensions", - "staged", - "staged-test-extension.xpi" - ); - let extensionsXpiTrashPath = PathUtils.join( - tempDir, - "extensions", - "trash", - "trashed-test-extension.xpi" - ); - let extensionsXpiUnpackedPath = PathUtils.join( - tempDir, - "extensions", - "unpacked-extension.xpi", - "manifest.json" - ); - await createKilobyteSizedFile( - extensionsXpiStagedPath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C - ); - await createKilobyteSizedFile( - extensionsXpiTrashPath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C - ); - await createKilobyteSizedFile( - extensionsXpiUnpackedPath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_XPI_C - ); - - // Create the browser extension data directory. - let browserExtensionDataPath = PathUtils.join( - tempDir, - "browser-extension-data", - "test-file" - ); - await createKilobyteSizedFile( - browserExtensionDataPath, - EXPECTED_KILOBYTES_FOR_EXTENSION_DATA - ); - - // Create the extensions storage directory. - let extensionsStoragePath = PathUtils.join( - tempDir, - "storage", - "default", - "moz-extension+++test-extension-id", - "idb", - "data.sqlite" - ); - // Other storage files that should not be counted. - let otherStoragePath = PathUtils.join( - tempDir, - "storage", - "default", - "https+++accounts.firefox.com", - "ls", - "data.sqlite" - ); - - await createKilobyteSizedFile( - extensionsStoragePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE - ); - await createKilobyteSizedFile( - otherStoragePath, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE - ); - - // Measure all the extensions data. - let extensionsBackupResource = new AddonsBackupResource(); - await extensionsBackupResource.measure(tempDir); - - let extensionsJsonSizeMeasurement = - Glean.browserBackup.extensionsJsonSize.testGetValue(); - Assert.equal( - extensionsJsonSizeMeasurement, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_JSON * 4, // There are 4 equally sized files. 
- "Should have collected the correct measurement of the total size of all extensions JSON files" - ); - - let extensionStorePermissionsDataSizeMeasurement = - Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue(); - Assert.equal( - extensionStorePermissionsDataSizeMeasurement, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORE, - "Should have collected the correct measurement of the size of the extension store permissions data" - ); - - let storageSyncSizeMeasurement = - Glean.browserBackup.storageSyncSize.testGetValue(); - Assert.equal( - storageSyncSizeMeasurement, - EXPECTED_KILOBYTES_FOR_STORAGE_SYNC, - "Should have collected the correct measurement of the size of the storage sync database" - ); - - let extensionsXpiDirectorySizeMeasurement = - Glean.browserBackup.extensionsXpiDirectorySize.testGetValue(); - Assert.equal( - extensionsXpiDirectorySizeMeasurement, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_DIRECTORY, - "Should have collected the correct measurement of the size 2 equally sized XPI files in the extensions directory" - ); - - let browserExtensionDataSizeMeasurement = - Glean.browserBackup.browserExtensionDataSize.testGetValue(); - Assert.equal( - browserExtensionDataSizeMeasurement, - EXPECTED_KILOBYTES_FOR_EXTENSION_DATA, - "Should have collected the correct measurement of the size of the browser extension data directory" - ); - - let extensionsStorageSizeMeasurement = - Glean.browserBackup.extensionsStorageSize.testGetValue(); - Assert.equal( - extensionsStorageSizeMeasurement, - EXPECTED_KILOBYTES_FOR_EXTENSIONS_STORAGE, - "Should have collected the correct measurement of all the extensions storage" - ); - - // Compare glean vs telemetry measurements - let scalars = TelemetryTestUtils.getProcessScalars("parent", false, false); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.extensions_json_size", - extensionsJsonSizeMeasurement, - "Glean and telemetry measurements for extensions JSON should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.extension_store_permissions_data_size", - extensionStorePermissionsDataSizeMeasurement, - "Glean and telemetry measurements for extension store permissions data should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.storage_sync_size", - storageSyncSizeMeasurement, - "Glean and telemetry measurements for storage sync database should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.extensions_xpi_directory_size", - extensionsXpiDirectorySizeMeasurement, - "Glean and telemetry measurements for extensions directory should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.browser_extension_data_size", - browserExtensionDataSizeMeasurement, - "Glean and telemetry measurements for browser extension data should be equal" - ); - TelemetryTestUtils.assertScalar( - scalars, - "browser.backup.extensions_storage_size", - extensionsStorageSizeMeasurement, - "Glean and telemetry measurements for extensions storage should be equal" - ); - - await maybeRemovePath(tempDir); -}); - -/** - * Tests that we can handle the extension store permissions data not existing. 
- */
-add_task(
-  async function test_AddonsBackupResource_no_extension_store_permissions_data() {
-    Services.fog.testResetFOG();
-
-    let tempDir = PathUtils.tempDir;
-
-    let extensionsBackupResource = new AddonsBackupResource();
-    await extensionsBackupResource.measure(tempDir);
-
-    let extensionStorePermissionsDataSizeMeasurement =
-      Glean.browserBackup.extensionStorePermissionsDataSize.testGetValue();
-    Assert.equal(
-      extensionStorePermissionsDataSizeMeasurement,
-      null,
-      "Should NOT have collected a measurement for the missing data"
-    );
-  }
-);
-
-/**
- * Tests that we can handle a profile with no moz-extension IndexedDB databases.
- */
-add_task(
-  async function test_AddonsBackupResource_no_extension_storage_databases() {
-    Services.fog.testResetFOG();
-
-    let tempDir = PathUtils.tempDir;
-
-    let extensionsBackupResource = new AddonsBackupResource();
-    await extensionsBackupResource.measure(tempDir);
-
-    let extensionsStorageSizeMeasurement =
-      Glean.browserBackup.extensionsStorageSize.testGetValue();
-    Assert.equal(
-      extensionsStorageSizeMeasurement,
-      null,
-      "Should NOT have collected a measurement for the missing data"
-    );
-  }
-);
diff --git a/browser/components/backup/tests/xpcshell/xpcshell.toml b/browser/components/backup/tests/xpcshell/xpcshell.toml
index 07e517f1f2..8a41c9e761 100644
--- a/browser/components/backup/tests/xpcshell/xpcshell.toml
+++ b/browser/components/backup/tests/xpcshell/xpcshell.toml
@@ -6,15 +6,25 @@ prefs = [
   "browser.backup.log=true",
 ]
 
+["test_AddonsBackupResource.js"]
+
 ["test_BackupResource.js"]
 support-files = ["data/test_xulstore.json"]
 
+["test_BackupService.js"]
+
+["test_BackupService_takeMeasurements.js"]
+
+["test_CookiesBackupResource.js"]
+
+["test_CredentialsAndSecurityBackupResource.js"]
+
+["test_FormHistoryBackupResource.js"]
+
 ["test_MiscDataBackupResource.js"]
 
 ["test_PlacesBackupResource.js"]
 
 ["test_PreferencesBackupResource.js"]
 
-["test_createBackup.js"]
-
-["test_measurements.js"]
+["test_SessionStoreBackupResource.js"]
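Taken together, the recover tests in this patch exercise a two-phase contract: backup() may return a ManifestEntry, recover(manifestEntry, recoveryPath, destProfilePath) copies files into the new profile and may return a post-recovery entry, and postRecovery(postRecoveryEntry) finishes any work that needs the recovered profile (importing bookmarks.jsonlz4, re-populating the snippets table). The driver below is purely illustrative of that hand-off; the resource list, manifest shape and bookkeeping are invented for the sketch and are not the actual BackupService implementation.

// Hypothetical driver showing the contract the tests above exercise piecewise.
async function recoverAllResources(resources, manifest, recoveryPath, destProfilePath) {
  let postRecoveryEntries = new Map();

  for (let ResourceClass of resources) {
    let resource = new ResourceClass();
    // Assumes the manifest is keyed by each resource's static `key`.
    let manifestEntry = manifest[ResourceClass.key] ?? null;
    let resourceRecoveryPath = PathUtils.join(recoveryPath, ResourceClass.key);
    let postRecoveryEntry = await resource.recover(
      manifestEntry,
      resourceRecoveryPath,
      destProfilePath
    );
    if (postRecoveryEntry) {
      postRecoveryEntries.set(resource, postRecoveryEntry);
    }
  }

  // Once the destination profile exists, run the deferred work, e.g. calling
  // BookmarkJSONUtils.importFromFile or writing snippets back into IndexedDB.
  for (let [resource, entry] of postRecoveryEntries) {
    await resource.postRecovery(entry);
  }
}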