Diffstat (limited to 'services/settings/test')
-rw-r--r--  services/settings/test/unit/test_attachments_downloader.js  706
-rw-r--r--  services/settings/test/unit/test_attachments_downloader/65650a0f-7c22-4c10-9744-2d67e301f5f4.pem  26
-rw-r--r--  services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt  1
-rw-r--r--  services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt.meta.json  10
-rw-r--r--  services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-content.txt.meta.json  9
-rw-r--r--  services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-meta.txt  1
-rw-r--r--  services/settings/test/unit/test_remote_settings.js  1656
-rw-r--r--  services/settings/test/unit/test_remote_settings_dump_lastmodified.js  55
-rw-r--r--  services/settings/test/unit/test_remote_settings_jexl_filters.js  216
-rw-r--r--  services/settings/test/unit/test_remote_settings_offline.js  141
-rw-r--r--  services/settings/test/unit/test_remote_settings_poll.js  1385
-rw-r--r--  services/settings/test/unit/test_remote_settings_recover_broken.js  151
-rw-r--r--  services/settings/test/unit/test_remote_settings_release_prefs.js  202
-rw-r--r--  services/settings/test/unit/test_remote_settings_signatures.js  836
-rw-r--r--  services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem  16
-rw-r--r--  services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem.certspec  5
-rw-r--r--  services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem  19
-rw-r--r--  services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem.certspec  4
-rw-r--r--  services/settings/test/unit/test_remote_settings_sync_history.js  69
-rw-r--r--  services/settings/test/unit/test_remote_settings_utils.js  164
-rw-r--r--  services/settings/test/unit/test_remote_settings_utils_telemetry.js  88
-rw-r--r--  services/settings/test/unit/test_remote_settings_worker.js  140
-rw-r--r--  services/settings/test/unit/test_shutdown_handling.js  139
-rw-r--r--  services/settings/test/unit/xpcshell.ini  24
24 files changed, 6063 insertions, 0 deletions
diff --git a/services/settings/test/unit/test_attachments_downloader.js b/services/settings/test/unit/test_attachments_downloader.js
new file mode 100644
index 0000000000..3630e8ba40
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader.js
@@ -0,0 +1,706 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+const { UptakeTelemetry } = ChromeUtils.import(
+ "resource://services-common/uptake-telemetry.js"
+);
+const { Downloader } = ChromeUtils.import(
+ "resource://services-settings/Attachments.jsm"
+);
+const { TelemetryTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TelemetryTestUtils.sys.mjs"
+);
+const { OS } = ChromeUtils.import("resource://gre/modules/osfile.jsm");
+
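+// Sample record whose attachment corresponds to the .pem file shipped in the
+// test_attachments_downloader test data directory (served from /cdn/ below).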
+const RECORD = {
+ id: "1f3a0802-648d-11ea-bd79-876a8b69c377",
+ attachment: {
+ hash: "f41ed47d0f43325c9f089d03415c972ce1d3f1ecab6e4d6260665baf3db3ccee",
+ size: 1597,
+ filename: "test_file.pem",
+ location:
+ "main-workspace/some-collection/65650a0f-7c22-4c10-9744-2d67e301f5f4.pem",
+ mimetype: "application/x-pem-file",
+ },
+};
+
+const RECORD_OF_DUMP = {
+ id: "filename-of-dump.txt",
+ attachment: {
+ filename: "filename-of-dump.txt",
+ hash: "4c46ef7e4f1951d210fe54c21e07c09bab265fd122580083ed1d6121547a8c6b",
+ size: 25,
+ },
+ last_modified: 1234567,
+ some_key: "some metadata",
+};
+
+let downloader;
+let server;
+
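+// Convert a file:// URL into its local filesystem path.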
+function pathFromURL(url) {
+ const uri = Services.io.newURI(url);
+ const file = uri.QueryInterface(Ci.nsIFileURL).file;
+ return file.path;
+}
+
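+// file:// URL of the local profile directory; downloaded attachments are
+// expected to be written below it.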
+const PROFILE_URL =
+ "file://" +
+ OS.Path.split(OS.Constants.Path.localProfileDir).components.join("/");
+
+function run_test() {
+ server = new HttpServer();
+ server.start(-1);
+ registerCleanupFunction(() => server.stop(() => {}));
+
+ server.registerDirectory(
+ "/cdn/main-workspace/some-collection/",
+ do_get_file("test_attachments_downloader")
+ );
+
+ run_next_test();
+}
+
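+// Reset state between tasks: point at the local server, recreate the
+// downloader with a no-op cache, and delete any previously downloaded attachment.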
+async function clear_state() {
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ downloader = new Downloader("main", "some-collection");
+ const dummyCacheImpl = {
+ get: async attachmentId => {},
+ set: async (attachmentId, attachment) => {},
+ delete: async attachmentId => {},
+ };
+ // The download() method requires a cacheImpl, but the Downloader
+ // class does not have one. Define a dummy no-op one.
+ Object.defineProperty(downloader, "cacheImpl", {
+ value: dummyCacheImpl,
+ // Writable to allow specific tests to override cacheImpl.
+ writable: true,
+ });
+ await downloader.deleteDownloaded(RECORD);
+
+ server.registerPathHandler("/v1/", (request, response) => {
+ response.write(
+ JSON.stringify({
+ capabilities: {
+ attachments: {
+ base_url: `http://localhost:${server.identity.primaryPort}/cdn/`,
+ },
+ },
+ })
+ );
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setStatusLine(null, 200, "OK");
+ });
+}
+
+add_task(clear_state);
+
+add_task(async function test_base_attachment_url_depends_on_server() {
+ const before = await downloader._baseAttachmentsURL();
+
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v2`
+ );
+
+ server.registerPathHandler("/v2/", (request, response) => {
+ response.write(
+ JSON.stringify({
+ capabilities: {
+ attachments: {
+ base_url: "http://some-cdn-url.org",
+ },
+ },
+ })
+ );
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setStatusLine(null, 200, "OK");
+ });
+
+ const after = await downloader._baseAttachmentsURL();
+
+ Assert.notEqual(before, after, "base URL was changed");
+ Assert.equal(after, "http://some-cdn-url.org/", "A trailing slash is added");
+});
+add_task(clear_state);
+
+add_task(
+ async function test_download_throws_server_info_error_if_invalid_response() {
+ server.registerPathHandler("/v1/", (request, response) => {
+ response.write("{bad json content");
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setStatusLine(null, 200, "OK");
+ });
+
+ let error;
+ try {
+ await downloader.download(RECORD);
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.ok(error instanceof Downloader.ServerInfoError);
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_download_writes_file_in_profile() {
+ const fileURL = await downloader.downloadToDisk(RECORD);
+ const localFilePath = pathFromURL(fileURL);
+
+ Assert.equal(
+ fileURL,
+ PROFILE_URL + "/settings/main/some-collection/test_file.pem"
+ );
+ Assert.ok(await IOUtils.exists(localFilePath));
+ const stat = await IOUtils.stat(localFilePath);
+ Assert.equal(stat.size, 1597);
+});
+add_task(clear_state);
+
+add_task(async function test_download_as_bytes() {
+ const bytes = await downloader.downloadAsBytes(RECORD);
+
+ // See the *.pem file in the test data.
+ Assert.ok(bytes.byteLength > 1500, `Wrong bytes size: ${bytes.byteLength}`);
+});
+add_task(clear_state);
+
+add_task(async function test_file_is_redownloaded_if_size_does_not_match() {
+ const fileURL = await downloader.downloadToDisk(RECORD);
+ const localFilePath = pathFromURL(fileURL);
+ await IOUtils.writeUTF8(localFilePath, "bad-content");
+ let stat = await IOUtils.stat(localFilePath);
+ Assert.notEqual(stat.size, 1597);
+
+ await downloader.downloadToDisk(RECORD);
+
+ stat = await IOUtils.stat(localFilePath);
+ Assert.equal(stat.size, 1597);
+});
+add_task(clear_state);
+
+add_task(async function test_file_is_redownloaded_if_corrupted() {
+ const fileURL = await downloader.downloadToDisk(RECORD);
+ const localFilePath = pathFromURL(fileURL);
+ const byteArray = await IOUtils.read(localFilePath);
+ byteArray[0] = 42;
+ await IOUtils.write(localFilePath, byteArray);
+ let content = await IOUtils.readUTF8(localFilePath);
+ Assert.notEqual(content.slice(0, 5), "-----");
+
+ await downloader.downloadToDisk(RECORD);
+
+ content = await IOUtils.readUTF8(localFilePath);
+ Assert.equal(content.slice(0, 5), "-----");
+});
+add_task(clear_state);
+
+add_task(async function test_download_is_retried_3_times_if_download_fails() {
+ const record = {
+ id: "abc",
+ attachment: {
+ ...RECORD.attachment,
+ location: "404-error.pem",
+ },
+ };
+
+ let called = 0;
+ const _fetchAttachment = downloader._fetchAttachment;
+ downloader._fetchAttachment = async url => {
+ called++;
+ return _fetchAttachment(url);
+ };
+
+ let error;
+ try {
+ await downloader.download(record);
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.equal(called, 4); // 1 + 3 retries
+ Assert.ok(error instanceof Downloader.DownloadError);
+});
+add_task(clear_state);
+
+add_task(async function test_download_is_retried_3_times_if_content_fails() {
+ const record = {
+ id: "abc",
+ attachment: {
+ ...RECORD.attachment,
+ hash: "always-wrong",
+ },
+ };
+ let called = 0;
+ downloader._fetchAttachment = async () => {
+ called++;
+ return new ArrayBuffer();
+ };
+
+ let error;
+ try {
+ await downloader.download(record);
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.equal(called, 4); // 1 + 3 retries
+ Assert.ok(error instanceof Downloader.BadContentError);
+});
+add_task(clear_state);
+
+add_task(async function test_delete_removes_local_file() {
+ const fileURL = await downloader.downloadToDisk(RECORD);
+ const localFilePath = pathFromURL(fileURL);
+ Assert.ok(await IOUtils.exists(localFilePath));
+
+ await downloader.deleteFromDisk(RECORD);
+
+ Assert.ok(!(await IOUtils.exists(localFilePath)));
+ // And removes parent folders.
+ const parentFolder = OS.Path.join(
+ OS.Constants.Path.localProfileDir,
+ ...downloader.folders
+ );
+ Assert.ok(!(await IOUtils.exists(parentFolder)));
+});
+add_task(clear_state);
+
+add_task(async function test_delete_all() {
+ const client = RemoteSettings("some-collection");
+ await client.db.create(RECORD);
+ await downloader.download(RECORD);
+ const fileURL = await downloader.downloadToDisk(RECORD);
+ const localFilePath = pathFromURL(fileURL);
+ Assert.ok(await IOUtils.exists(localFilePath));
+
+ await client.attachments.deleteAll();
+
+ Assert.ok(!(await IOUtils.exists(localFilePath)));
+ Assert.ok(!(await client.attachments.cacheImpl.get(RECORD.id)));
+});
+add_task(clear_state);
+
+add_task(async function test_downloader_is_accessible_via_client() {
+ const client = RemoteSettings("some-collection");
+
+ const fileURL = await client.attachments.downloadToDisk(RECORD);
+
+ Assert.equal(
+ fileURL,
+ [
+ PROFILE_URL,
+ "settings",
+ client.bucketName,
+ client.collectionName,
+ RECORD.attachment.filename,
+ ].join("/")
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_downloader_reports_download_errors() {
+ await withFakeChannel("nightly", async () => {
+ const client = RemoteSettings("some-collection");
+
+ const record = {
+ attachment: {
+ ...RECORD.attachment,
+ location: "404-error.pem",
+ },
+ };
+
+ try {
+ await client.attachments.download(record, { retry: 0 });
+ } catch (e) {}
+
+ TelemetryTestUtils.assertEvents([
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.DOWNLOAD_ERROR,
+ {
+ source: client.identifier,
+ },
+ ],
+ ]);
+ });
+});
+add_task(clear_state);
+
+add_task(async function test_downloader_reports_offline_error() {
+ const backupOffline = Services.io.offline;
+ Services.io.offline = true;
+
+ await withFakeChannel("nightly", async () => {
+ try {
+ const client = RemoteSettings("some-collection");
+ const record = {
+ attachment: {
+ ...RECORD.attachment,
+ location: "will-try-and-fail.pem",
+ },
+ };
+ try {
+ await client.attachments.download(record, { retry: 0 });
+ } catch (e) {}
+
+ TelemetryTestUtils.assertEvents([
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.NETWORK_OFFLINE_ERROR,
+ {
+ source: client.identifier,
+ },
+ ],
+ ]);
+ } finally {
+ Services.io.offline = backupOffline;
+ }
+ });
+});
+add_task(clear_state);
+
+// Common code for test_download_cache_hit and test_download_cache_corruption.
+async function doTestDownloadCacheImpl({ simulateCorruption }) {
+ let readCount = 0;
+ let writeCount = 0;
+ const cacheImpl = {
+ async get(attachmentId) {
+ Assert.equal(attachmentId, RECORD.id, "expected attachmentId");
+ ++readCount;
+ if (simulateCorruption) {
+ throw new Error("Simulation of corrupted cache (read)");
+ }
+ },
+ async set(attachmentId, attachment) {
+ Assert.equal(attachmentId, RECORD.id, "expected attachmentId");
+ Assert.deepEqual(attachment.record, RECORD, "expected record");
+ ++writeCount;
+ if (simulateCorruption) {
+ throw new Error("Simulation of corrupted cache (write)");
+ }
+ },
+ async delete(attachmentId) {},
+ };
+ Object.defineProperty(downloader, "cacheImpl", { value: cacheImpl });
+
+ let downloadResult = await downloader.download(RECORD);
+ Assert.equal(downloadResult._source, "remote_match", "expected source");
+ Assert.equal(downloadResult.buffer.byteLength, 1597, "expected result");
+ Assert.equal(readCount, 1, "expected cache read attempts");
+ Assert.equal(writeCount, 1, "expected cache write attempts");
+}
+
+add_task(async function test_download_cache_hit() {
+ await doTestDownloadCacheImpl({ simulateCorruption: false });
+});
+add_task(clear_state);
+
+// Verify that the downloader works despite a broken cache implementation.
+add_task(async function test_download_cache_corruption() {
+ await doTestDownloadCacheImpl({ simulateCorruption: true });
+});
+add_task(clear_state);
+
+add_task(async function test_download_cached() {
+ const client = RemoteSettings("main", "some-collection");
+ const attachmentId = "dummy filename";
+ const badRecord = {
+ attachment: {
+ ...RECORD.attachment,
+ hash: "non-matching hash",
+ location: "non-existing-location-should-fail.bin",
+ },
+ };
+ async function downloadWithCache(record, options) {
+ options = { ...options, useCache: true };
+ return client.attachments.download(record, options);
+ }
+ function checkInfo(downloadResult, expectedSource, msg) {
+ Assert.deepEqual(
+ downloadResult.record,
+ RECORD,
+ `${msg} : expected identical record`
+ );
+ // Simple check: assume that content is identical if the size matches.
+ Assert.equal(
+ downloadResult.buffer.byteLength,
+ RECORD.attachment.size,
+ `${msg} : expected buffer`
+ );
+ Assert.equal(
+ downloadResult._source,
+ expectedSource,
+ `${msg} : expected source of the result`
+ );
+ }
+
+ await Assert.rejects(
+ downloadWithCache(null, { attachmentId }),
+ /DownloadError: Could not download dummy filename/,
+ "Download without record or cache should fail."
+ );
+
+ // Populate cache.
+ const info1 = await downloadWithCache(RECORD, { attachmentId });
+ checkInfo(info1, "remote_match", "first time download");
+
+ await Assert.rejects(
+ downloadWithCache(null, { attachmentId }),
+ /DownloadError: Could not download dummy filename/,
+ "Download without record still fails even if there is a cache."
+ );
+
+ await Assert.rejects(
+ downloadWithCache(badRecord, { attachmentId }),
+ /DownloadError: Could not download .*non-existing-location-should-fail.bin/,
+ "Download with non-matching record still fails even if there is a cache."
+ );
+
+ // Download from cache.
+ const info2 = await downloadWithCache(RECORD, { attachmentId });
+ checkInfo(info2, "cache_match", "download matching record from cache");
+
+ const info3 = await downloadWithCache(RECORD, {
+ attachmentId,
+ fallbackToCache: true,
+ });
+ checkInfo(info3, "cache_match", "fallbackToCache accepts matching record");
+
+ const info4 = await downloadWithCache(null, {
+ attachmentId,
+ fallbackToCache: true,
+ });
+ checkInfo(info4, "cache_fallback", "fallbackToCache accepts null record");
+
+ const info5 = await downloadWithCache(badRecord, {
+ attachmentId,
+ fallbackToCache: true,
+ });
+ checkInfo(info5, "cache_fallback", "fallbackToCache ignores bad record");
+
+ // Bye bye cache.
+ await client.attachments.deleteDownloaded({ id: attachmentId });
+ await Assert.rejects(
+ downloadWithCache(null, { attachmentId, fallbackToCache: true }),
+ /DownloadError: Could not download dummy filename/,
+ "Download without cache should fail again."
+ );
+ await Assert.rejects(
+ downloadWithCache(badRecord, { attachmentId, fallbackToCache: true }),
+ /DownloadError: Could not download .*non-existing-location-should-fail.bin/,
+ "Download should fail to fall back to a download of a non-existing record"
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_download_from_dump() {
+ const client = RemoteSettings("dump-collection", {
+ bucketName: "dump-bucket",
+ });
+
+ // Temporarily replace the resource:-URL with another resource:-URL.
+ const orig_RESOURCE_BASE_URL = Downloader._RESOURCE_BASE_URL;
+ Downloader._RESOURCE_BASE_URL = "resource://rs-downloader-test";
+ const resProto = Services.io
+ .getProtocolHandler("resource")
+ .QueryInterface(Ci.nsIResProtocolHandler);
+ resProto.setSubstitution(
+ "rs-downloader-test",
+ Services.io.newFileURI(do_get_file("test_attachments_downloader"))
+ );
+
+ function checkInfo(result, expectedSource, expectedRecord = RECORD_OF_DUMP) {
+ Assert.equal(
+ new TextDecoder().decode(new Uint8Array(result.buffer)),
+ "This would be a RS dump.\n",
+ "expected content from dump"
+ );
+ Assert.deepEqual(result.record, expectedRecord, "expected record for dump");
+ Assert.equal(result._source, expectedSource, "expected source of dump");
+ }
+
+ // If record matches, should happen before network request.
+ const dump1 = await client.attachments.download(RECORD_OF_DUMP, {
+ // Note: attachmentId not set, so should fall back to record.id.
+ fallbackToDump: true,
+ });
+ checkInfo(dump1, "dump_match");
+
+ // If no record given, should try network first, but then fall back to dump.
+ const dump2 = await client.attachments.download(null, {
+ attachmentId: RECORD_OF_DUMP.id,
+ fallbackToDump: true,
+ });
+ checkInfo(dump2, "dump_fallback");
+
+ // Fill the cache with the same data as the dump for the next part.
+ await client.db.saveAttachment(RECORD_OF_DUMP.id, {
+ record: RECORD_OF_DUMP,
+ blob: new Blob([dump1.buffer]),
+ });
+ // The dump should take precedence over the cache.
+ const dump3 = await client.attachments.download(RECORD_OF_DUMP, {
+ fallbackToCache: true,
+ fallbackToDump: true,
+ });
+ checkInfo(dump3, "dump_match");
+
+ // When the record is not given, the dump takes precedence over the cache
+ // as a fallback (when the cache and dump are identical).
+ const dump4 = await client.attachments.download(null, {
+ attachmentId: RECORD_OF_DUMP.id,
+ fallbackToCache: true,
+ fallbackToDump: true,
+ });
+ checkInfo(dump4, "dump_fallback");
+
+ // Store a record in the cache that is newer than the dump.
+ const RECORD_NEWER_THAN_DUMP = {
+ ...RECORD_OF_DUMP,
+ last_modified: RECORD_OF_DUMP.last_modified + 1,
+ };
+ await client.db.saveAttachment(RECORD_OF_DUMP.id, {
+ record: RECORD_NEWER_THAN_DUMP,
+ blob: new Blob([dump1.buffer]),
+ });
+
+ // When the record is not given, use the cache if it has a more recent record.
+ const dump5 = await client.attachments.download(null, {
+ attachmentId: RECORD_OF_DUMP.id,
+ fallbackToCache: true,
+ fallbackToDump: true,
+ });
+ checkInfo(dump5, "cache_fallback", RECORD_NEWER_THAN_DUMP);
+
+ // When a record is given, use whichever one has the matching last_modified.
+ const dump6 = await client.attachments.download(RECORD_OF_DUMP, {
+ fallbackToCache: true,
+ fallbackToDump: true,
+ });
+ checkInfo(dump6, "dump_match");
+ const dump7 = await client.attachments.download(RECORD_NEWER_THAN_DUMP, {
+ fallbackToCache: true,
+ fallbackToDump: true,
+ });
+ checkInfo(dump7, "cache_match", RECORD_NEWER_THAN_DUMP);
+
+ await client.attachments.deleteDownloaded(RECORD_OF_DUMP);
+
+ await Assert.rejects(
+ client.attachments.download(null, {
+ attachmentId: "filename-without-meta.txt",
+ fallbackToDump: true,
+ }),
+ /DownloadError: Could not download filename-without-meta.txt/,
+ "Cannot download dump that lacks a .meta.json file"
+ );
+
+ await Assert.rejects(
+ client.attachments.download(null, {
+ attachmentId: "filename-without-content.txt",
+ fallbackToDump: true,
+ }),
+ /Could not download resource:\/\/rs-downloader-test\/settings\/dump-bucket\/dump-collection\/filename-without-content\.txt(?!\.meta\.json)/,
+ "Cannot download dump that is missing, despite the existing .meta.json"
+ );
+
+ // Restore, just in case.
+ Downloader._RESOURCE_BASE_URL = orig_RESOURCE_BASE_URL;
+ resProto.setSubstitution("rs-downloader-test", null);
+});
+// Not really needed because the last test doesn't modify the main collection,
+// but added for consistency with the other test tasks around here.
+add_task(clear_state);
+
+add_task(async function test_obsolete_attachments_are_pruned() {
+ const RECORD2 = {
+ ...RECORD,
+ id: "another-id",
+ };
+ const client = RemoteSettings("some-collection");
+ // Store records and related attachments directly in the cache.
+ await client.db.importChanges({}, 42, [RECORD, RECORD2], { clear: true });
+ await client.db.saveAttachment(RECORD.id, {
+ record: RECORD,
+ blob: new Blob(["123"]),
+ });
+ await client.db.saveAttachment("custom-id", {
+ record: RECORD2,
+ blob: new Blob(["456"]),
+ });
+ // Store an extraneous cached attachment.
+ await client.db.saveAttachment("bar", {
+ record: { id: "bar" },
+ blob: new Blob(["789"]),
+ });
+
+ const recordAttachment = await client.attachments.cacheImpl.get(RECORD.id);
+ Assert.equal(
+ await recordAttachment.blob.text(),
+ "123",
+ "Record has a cached attachment"
+ );
+ const record2Attachment = await client.attachments.cacheImpl.get("custom-id");
+ Assert.equal(
+ await record2Attachment.blob.text(),
+ "456",
+ "Record 2 has a cached attachment"
+ );
+ const { blob: cachedExtra } = await client.attachments.cacheImpl.get("bar");
+ Assert.equal(await cachedExtra.text(), "789", "There is an extra attachment");
+
+ await client.attachments.prune([]);
+
+ Assert.ok(
+ await client.attachments.cacheImpl.get(RECORD.id),
+ "Record attachment was kept"
+ );
+ Assert.ok(
+ await client.attachments.cacheImpl.get("custom-id"),
+ "Record 2 attachment was kept"
+ );
+ Assert.ok(
+ !(await client.attachments.cacheImpl.get("bar")),
+ "Extra was deleted"
+ );
+});
+add_task(clear_state);
+
+add_task(
+ async function test_obsolete_attachments_listed_as_excluded_are_not_pruned() {
+ const client = RemoteSettings("some-collection");
+ // Store records and related attachments directly in the cache.
+ await client.db.importChanges({}, 42, [], { clear: true });
+ await client.db.saveAttachment(RECORD.id, {
+ record: RECORD,
+ blob: new Blob(["123"]),
+ });
+
+ const recordAttachment = await client.attachments.cacheImpl.get(RECORD.id);
+ Assert.equal(
+ await recordAttachment.blob.text(),
+ "123",
+ "Record has a cached attachment"
+ );
+
+ await client.attachments.prune([RECORD.id]);
+
+ Assert.ok(
+ await client.attachments.cacheImpl.get(RECORD.id),
+ "Record attachment was kept"
+ );
+ }
+);
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_attachments_downloader/65650a0f-7c22-4c10-9744-2d67e301f5f4.pem b/services/settings/test/unit/test_attachments_downloader/65650a0f-7c22-4c10-9744-2d67e301f5f4.pem
new file mode 100644
index 0000000000..502e8c9ce0
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader/65650a0f-7c22-4c10-9744-2d67e301f5f4.pem
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIEbjCCA1agAwIBAgIQBg3WwdBnkBtUdfz/wp4xNzANBgkqhkiG9w0BAQsFADBa
+MQswCQYDVQQGEwJJRTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJl
+clRydXN0MSIwIAYDVQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTE1
+MTAxNDEyMDAwMFoXDTIwMTAxNDEyMDAwMFowbzELMAkGA1UEBhMCVVMxCzAJBgNV
+BAgTAkNBMRYwFAYDVQQHEw1TYW4gRnJhbmNpc2NvMRkwFwYDVQQKExBDbG91ZEZs
+YXJlLCBJbmMuMSAwHgYDVQQDExdDbG91ZEZsYXJlIEluYyBSU0EgQ0EtMTCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJGiNOIE4s0M4wdhDeV9aMfAYY9l
+yG9cfGQqt7a5UgrRA81bi4istCyhzfzRWUW+NAmf6X2HEnA3xLI1M+pH/xEbk9pw
+jc8/1CPy9jUjBwb89zt5PWh2I1KxZVg/Bnx2yYdVcKTUMKt0GLDXfZXN+RYZHJQo
+lDlzjH5xV0IpDMv/FsMEZWcfx1JorBf08bRnRVkl9RY00y2ujVr+492ze+zYQ9s7
+HcidpR+7ret3jzLSvojsaA5+fOaCG0ctVJcLfnkQ5lWR95ByBdO1NapfqZ1+kmCL
+3baVSeUpYQriBwznxfLuGs8POo4QdviYVtSPBWjOEfb+o1c6Mbo8p4noFzUCAwEA
+AaOCARkwggEVMBIGA1UdEwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgGGMDQG
+CCsGAQUFBwEBBCgwJjAkBggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQu
+Y29tMDoGA1UdHwQzMDEwL6AtoCuGKWh0dHA6Ly9jcmwzLmRpZ2ljZXJ0LmNvbS9P
+bW5pcm9vdDIwMjUuY3JsMD0GA1UdIAQ2MDQwMgYEVR0gADAqMCgGCCsGAQUFBwIB
+FhxodHRwczovL3d3dy5kaWdpY2VydC5jb20vQ1BTMB0GA1UdDgQWBBSRBYrfTCLG
+bYuUTBZFfu5vAvu3wDAfBgNVHSMEGDAWgBTlnVkwgkdYzKz6CFQ2hns6tQRN8DAN
+BgkqhkiG9w0BAQsFAAOCAQEAVJle3ar9NSnTrLAhgfkcpClIY6/kabDIEa8cOnu1
+SOXf4vbtZakSmmIbFbmYDUGIU5XwwVdF/FKNzNBRf9G4EL/S0NXytBKj4A34UGQA
+InaV+DgVLzCifN9cAHi8EFEAfbglUvPvLPFXF0bwffElYm7QBSiHYSZmfOKLCyiv
+3zlQsf7ozNBAxfbmnRMRSUBcIhRwnaFoFgDs7yU6R1Yk4pO7eMgWpdPGhymDTIvv
+RnauKStzKsAli9i5hQ4nTDITUpMAmeJoXodgwRkC3Civw32UR2rxObIyxPpbfODb
+sZKNGO9K5Sjj6turB1zwbd2wI8MhtUCY9tGmSYhe7G6Bkw==
+-----END CERTIFICATE-----
\ No newline at end of file
diff --git a/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt
new file mode 100644
index 0000000000..77d7b4154f
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt
@@ -0,0 +1 @@
+This would be a RS dump.
diff --git a/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt.meta.json b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt.meta.json
new file mode 100644
index 0000000000..de7681940d
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-of-dump.txt.meta.json
@@ -0,0 +1,10 @@
+{
+ "id": "filename-of-dump.txt",
+ "attachment": {
+ "filename": "filename-of-dump.txt",
+ "hash": "4c46ef7e4f1951d210fe54c21e07c09bab265fd122580083ed1d6121547a8c6b",
+ "size": 25
+ },
+ "last_modified": 1234567,
+ "some_key": "some metadata"
+}
diff --git a/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-content.txt.meta.json b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-content.txt.meta.json
new file mode 100644
index 0000000000..33fd28a710
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-content.txt.meta.json
@@ -0,0 +1,9 @@
+{
+ "fyi": "This .meta.json file describes an attachment, but that attachment is missing.",
+ "attachment": {
+ "filename": "filename-without-content.txt",
+ "hash": "...",
+ "size": "..."
+ }
+}
+
diff --git a/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-meta.txt b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-meta.txt
new file mode 100644
index 0000000000..5fbfd11c9e
--- /dev/null
+++ b/services/settings/test/unit/test_attachments_downloader/settings/dump-bucket/dump-collection/filename-without-meta.txt
@@ -0,0 +1 @@
+The filename-without-meta.txt.meta.json file is missing.
diff --git a/services/settings/test/unit/test_remote_settings.js b/services/settings/test/unit/test_remote_settings.js
new file mode 100644
index 0000000000..3407244b38
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings.js
@@ -0,0 +1,1656 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+const { ObjectUtils } = ChromeUtils.import(
+ "resource://gre/modules/ObjectUtils.jsm"
+);
+const { setTimeout } = ChromeUtils.importESModule(
+ "resource://gre/modules/Timer.sys.mjs"
+);
+
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+const { UptakeTelemetry, Policy } = ChromeUtils.import(
+ "resource://services-common/uptake-telemetry.js"
+);
+const { TelemetryTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TelemetryTestUtils.sys.mjs"
+);
+
+const IS_ANDROID = AppConstants.platform == "android";
+
+const TELEMETRY_COMPONENT = "remotesettings";
+const TELEMETRY_EVENTS_FILTERS = {
+ category: "uptake.remotecontent.result",
+ method: "uptake",
+};
+
+let server;
+let client;
+let clientWithDump;
+
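+// Shared cleanup between tasks: reset preview mode, signature verification,
+// local databases, sync listeners, and the Telemetry events snapshot.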
+async function clear_state() {
+ // Reset preview mode.
+ RemoteSettings.enablePreviewMode(undefined);
+ Services.prefs.clearUserPref("services.settings.preview_enabled");
+
+ client.verifySignature = false;
+ clientWithDump.verifySignature = false;
+
+ // Clear local DB.
+ await client.db.clear();
+ // Reset event listeners.
+ client._listeners.set("sync", []);
+
+ await clientWithDump.db.clear();
+
+ // Clear events snapshot.
+ TelemetryTestUtils.assertEvents([], {}, { process: "dummy" });
+}
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ // Pretend we are in nightly channel to make sure all telemetry events are sent.
+ let oldGetChannel = Policy.getChannel;
+ Policy.getChannel = () => "nightly";
+
+ // Point the blocklist clients to use this local HTTP server.
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ Services.prefs.setCharPref("services.settings.loglevel", "debug");
+
+ client = RemoteSettings("password-fields");
+ clientWithDump = RemoteSettings("language-dictionaries");
+
+ server.registerPathHandler("/v1/", handleResponse);
+ server.registerPathHandler(
+ "/v1/buckets/monitor/collections/changes/changeset",
+ handleResponse
+ );
+ server.registerPathHandler(
+ "/v1/buckets/main/collections/password-fields/changeset",
+ handleResponse
+ );
+ server.registerPathHandler(
+ "/v1/buckets/main/collections/language-dictionaries/changeset",
+ handleResponse
+ );
+ server.registerPathHandler(
+ "/v1/buckets/main/collections/with-local-fields/changeset",
+ handleResponse
+ );
+ server.registerPathHandler("/fake-x5u", handleResponse);
+
+ run_next_test();
+
+ registerCleanupFunction(() => {
+ Policy.getChannel = oldGetChannel;
+ server.stop(() => {});
+ });
+}
+add_task(clear_state);
+
+add_task(async function test_records_obtained_from_server_are_stored_in_db() {
+ // Test that an empty db gets populated.
+ await client.maybeSync(2000);
+
+ // Open the collection, verify it's been populated:
+ // Our test data has a single record; it should be in the local collection
+ const list = await client.get();
+ equal(list.length, 1);
+
+ const timestamp = await client.db.getLastModified();
+ equal(timestamp, 3000, "timestamp was stored");
+
+ const { signature } = await client.db.getMetadata();
+ equal(signature.signature, "abcdef", "metadata was stored");
+});
+add_task(clear_state);
+
+add_task(
+ async function test_records_from_dump_are_listed_as_created_in_event() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ let received;
+ clientWithDump.on("sync", ({ data }) => (received = data));
+ // Use a timestamp greater than the latest record in the dump.
+ const timestamp = 5000000000000; // Fri Jun 11 2128
+
+ await clientWithDump.maybeSync(timestamp);
+
+ const list = await clientWithDump.get();
+ ok(list.length > 20, `The dump was loaded (${list.length} records)`);
+ equal(received.created[0].id, "xx", "Records from the sync come first.");
+
+ const createdById = received.created.reduce((acc, r) => {
+ acc[r.id] = r;
+ return acc;
+ }, {});
+
+ ok(
+ !(received.deleted[0].id in createdById),
+ "Deleted records are not listed as created"
+ );
+ equal(
+ createdById[received.updated[0].new.id],
+ received.updated[0].new,
+ "The records that were updated should appear as created in their newest form."
+ );
+
+ equal(
+ received.created.length,
+ list.length,
+ "The list of created records contains the dump"
+ );
+ equal(received.current.length, received.created.length);
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_throws_when_network_is_offline() {
+ const backupOffline = Services.io.offline;
+ try {
+ Services.io.offline = true;
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ clientWithDump.identifier
+ );
+ let error;
+ try {
+ await clientWithDump.maybeSync(2000);
+ } catch (e) {
+ error = e;
+ }
+ equal(error.name, "NetworkOfflineError");
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ clientWithDump.identifier
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.NETWORK_OFFLINE_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+ } finally {
+ Services.io.offline = backupOffline;
+ }
+});
+add_task(clear_state);
+
+add_task(async function test_sync_event_is_sent_even_if_up_to_date() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ // First, determine the dump timestamp. Syncing will load the dump.
+ // Use a timestamp lower than the latest record in the dump.
+ await clientWithDump._importJSONDump();
+ const uptodateTimestamp = await clientWithDump.db.getLastModified();
+ await clear_state();
+
+ // Now, simulate that the server data hasn't changed since the dump was released.
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ clientWithDump.identifier
+ );
+ let received;
+ clientWithDump.on("sync", ({ data }) => (received = data));
+
+ await clientWithDump.maybeSync(uptodateTimestamp);
+
+ ok(!!received.current.length, "Dump records are listed as created");
+ equal(received.current.length, received.created.length);
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ clientWithDump.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.UP_TO_DATE]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_records_can_have_local_fields() {
+ const c = RemoteSettings("with-local-fields", { localFields: ["accepted"] });
+ c.verifySignature = false;
+
+ await c.maybeSync(2000);
+
+ await c.db.update({
+ id: "c74279ce-fb0a-42a6-ae11-386b567a6119",
+ accepted: true,
+ });
+ await c.maybeSync(3000); // Does not fail.
+});
+add_task(clear_state);
+
+add_task(
+ async function test_records_changes_are_overwritten_by_server_changes() {
+ // Create some local conflicting data, and make sure it syncs without error.
+ await client.db.create({
+ website: "",
+ id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ });
+
+ await client.maybeSync(2000);
+
+ const data = await client.get();
+ equal(data[0].website, "https://some-website.com");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_get_loads_default_records_from_a_local_dump_when_database_is_empty() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+
+ // When collection has a dump in services/settings/dumps/{bucket}/{collection}.json
+ const data = await clientWithDump.get();
+ notEqual(data.length, 0);
+ // No synchronization happened (responses are not mocked).
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_get_loads_dump_only_once_if_called_in_parallel() {
+ const backup = clientWithDump._importJSONDump;
+ let callCount = 0;
+ clientWithDump._importJSONDump = async () => {
+ callCount++;
+ // eslint-disable-next-line mozilla/no-arbitrary-setTimeout
+ await new Promise(resolve => setTimeout(resolve, 100));
+ return 42;
+ };
+ await Promise.all([clientWithDump.get(), clientWithDump.get()]);
+ equal(callCount, 1, "JSON dump was imported only once");
+ clientWithDump._importJSONDump = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_get_falls_back_to_dump_if_db_fails() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ const backup = clientWithDump.db.getLastModified;
+ clientWithDump.db.getLastModified = () => {
+ throw new Error("Unknown error");
+ };
+
+ const records = await clientWithDump.get({ dumpFallback: true });
+ ok(!!records.length, "dump content is returned");
+
+ // If fallback is disabled, error is thrown.
+ let error;
+ try {
+ await clientWithDump.get({ dumpFallback: false });
+ } catch (e) {
+ error = e;
+ }
+ equal(error.message, "Unknown error");
+
+ clientWithDump.db.getLastModified = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_get_sorts_results_if_specified() {
+ await client.db.importChanges(
+ {},
+ 42,
+ [
+ {
+ field: 12,
+ id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ },
+ {
+ field: 7,
+ id: "d83444a4-f348-4cd8-8228-842cb927db9f",
+ },
+ ],
+ { clear: true }
+ );
+
+ const records = await client.get({ order: "field" });
+ ok(
+ records[0].field < records[records.length - 1].field,
+ "records are sorted"
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_get_falls_back_sorts_results() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ const backup = clientWithDump.db.getLastModified;
+ clientWithDump.db.getLastModified = () => {
+ throw new Error("Unknown error");
+ };
+
+ const records = await clientWithDump.get({
+ dumpFallback: true,
+ order: "-id",
+ });
+
+ ok(records[0].id > records[records.length - 1].id, "records are sorted");
+
+ clientWithDump.db.getLastModified = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_get_falls_back_to_dump_if_db_fails_later() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ const backup = clientWithDump.db.list;
+ clientWithDump.db.list = () => {
+ throw new Error("Unknown error");
+ };
+
+ const records = await clientWithDump.get({ dumpFallback: true });
+ ok(!!records.length, "dump content is returned");
+
+ // If fallback is disabled, error is thrown.
+ let error;
+ try {
+ await clientWithDump.get({ dumpFallback: false });
+ } catch (e) {
+ error = e;
+ }
+ equal(error.message, "Unknown error");
+
+ clientWithDump.db.list = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_get_falls_back_to_dump_if_network_fails() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ const backup = clientWithDump.sync;
+ clientWithDump.sync = () => {
+ throw new Error("Sync error");
+ };
+
+ const records = await clientWithDump.get();
+ ok(!!records.length, "dump content is returned");
+
+ clientWithDump.sync = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_get_does_not_sync_if_empty_dump_is_provided() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+
+ const clientWithEmptyDump = RemoteSettings("example");
+ Assert.ok(!(await Utils.hasLocalData(clientWithEmptyDump)));
+
+ const data = await clientWithEmptyDump.get();
+
+ equal(data.length, 0);
+ Assert.ok(await Utils.hasLocalData(clientWithEmptyDump));
+});
+add_task(clear_state);
+
+add_task(async function test_get_synchronization_can_be_disabled() {
+ const data = await client.get({ syncIfEmpty: false });
+
+ equal(data.length, 0);
+});
+add_task(clear_state);
+
+add_task(
+ async function test_get_triggers_synchronization_when_database_is_empty() {
+ // The "password-fields" collection has no local dump, and no local data.
+ // Therefore a synchronization will happen.
+ const data = await client.get();
+
+ // Data comes from mocked HTTP response (see below).
+ equal(data.length, 1);
+ equal(data[0].selector, "#webpage[field-pwd]");
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_get_ignores_synchronization_errors_by_default() {
+ // The monitor endpoint won't contain any information about this collection.
+ let data = await RemoteSettings("some-unknown-key").get();
+ equal(data.length, 0);
+ // The sync endpoints are not mocked, this fails internally.
+ data = await RemoteSettings("no-mocked-responses").get();
+ equal(data.length, 0);
+});
+add_task(clear_state);
+
+add_task(async function test_get_throws_if_no_empty_fallback() {
+ // The monitor endpoint won't contain any information about this collection.
+ try {
+ await RemoteSettings("some-unknown-key").get({
+ emptyListFallback: false,
+ });
+ Assert.ok(false, ".get() should throw");
+ } catch (error) {
+ Assert.ok(
+ error.message.includes("Response from server unparseable"),
+ "Server error was thrown"
+ );
+ }
+});
+add_task(clear_state);
+
+add_task(async function test_get_verify_signature_no_sync() {
+ // No signature in metadata, and no sync if empty.
+ let error;
+ try {
+ await client.get({ verifySignature: true, syncIfEmpty: false });
+ } catch (e) {
+ error = e;
+ }
+ equal(error.message, "Missing signature (main/password-fields)");
+});
+add_task(clear_state);
+
+add_task(async function test_get_can_verify_signature_pulled() {
+ // Populate the local DB (records only, e.g. loaded from a dump previously).
+ await client._importJSONDump();
+
+ let calledSignature;
+ client._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ calledSignature = signature;
+ return true;
+ },
+ };
+ client.verifySignature = true;
+
+ // No metadata in the local DB, but it gets pulled and the signature is then verified.
+ ok(ObjectUtils.isEmpty(await client.db.getMetadata()), "Metadata is empty");
+
+ await client.get({ verifySignature: true });
+
+ ok(
+ !ObjectUtils.isEmpty(await client.db.getMetadata()),
+ "Metadata was pulled"
+ );
+ ok(calledSignature.endsWith("some-sig"), "Signature was verified");
+});
+add_task(clear_state);
+
+add_task(async function test_get_can_verify_signature() {
+ // Populate the local DB (record and metadata)
+ await client.maybeSync(2000);
+
+ // It validates the signature that was stored in the local DB.
+ let calledSignature;
+ client._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ calledSignature = signature;
+ return JSON.parse(serialized).data.length == 1;
+ },
+ };
+ ok(await Utils.hasLocalData(client), "Local data was populated");
+ await client.get({ verifySignature: true });
+
+ ok(calledSignature.endsWith("abcdef"), "Signature was verified");
+
+ // It throws when signature does not verify.
+ await client.db.delete("9d500963-d80e-3a91-6e74-66f3811b99cc");
+ let error = null;
+ try {
+ await client.get({ verifySignature: true });
+ } catch (e) {
+ error = e;
+ }
+ equal(
+ error.message,
+ "Invalid content signature (main/password-fields) using 'fake-x5u'"
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_get_does_not_verify_signature_if_load_dump() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+
+ let called;
+ clientWithDump._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ called = true;
+ return true;
+ },
+ };
+
+ // When dump is loaded, signature is not verified.
+ const records = await clientWithDump.get({ verifySignature: true });
+ ok(!!records.length, "dump is loaded");
+ ok(!called, "signature is missing but not verified");
+
+ // If metadata is missing locally, it is not fetched if `syncIfEmpty` is disabled.
+ let error;
+ try {
+ await clientWithDump.get({ verifySignature: true, syncIfEmpty: false });
+ } catch (e) {
+ error = e;
+ }
+ ok(!called, "signer was not called");
+ equal(
+ error.message,
+ "Missing signature (main/language-dictionaries)",
+ "signature is missing locally"
+ );
+
+ // If metadata is missing locally, it is fetched by default (`syncIfEmpty: true`)
+ await clientWithDump.get({ verifySignature: true });
+ const metadata = await clientWithDump.db.getMetadata();
+ ok(!!Object.keys(metadata).length, "metadata was fetched");
+ ok(called, "signature was verified for the data that was in dump");
+});
+add_task(clear_state);
+
+add_task(
+ async function test_get_does_verify_signature_if_json_loaded_in_parallel() {
+ const backup = clientWithDump._verifier;
+ let callCount = 0;
+ clientWithDump._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ callCount++;
+ return true;
+ },
+ };
+ await Promise.all([
+ clientWithDump.get({ verifySignature: true }),
+ clientWithDump.get({ verifySignature: true }),
+ ]);
+ equal(callCount, 0, "No need to verify signatures if JSON dump is loaded");
+ clientWithDump._verifier = backup;
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_get_can_force_a_sync() {
+ const step0 = await client.db.getLastModified();
+ await client.get({ forceSync: true });
+ const step1 = await client.db.getLastModified();
+ await client.get();
+ const step2 = await client.db.getLastModified();
+ await client.get({ forceSync: true });
+ const step3 = await client.db.getLastModified();
+
+ equal(step0, null);
+ equal(step1, 3000);
+ equal(step2, 3000);
+ equal(step3, 3001);
+});
+add_task(clear_state);
+
+add_task(async function test_sync_runs_once_only() {
+ const backup = Utils.log.warn;
+ const messages = [];
+ Utils.log.warn = m => {
+ messages.push(m);
+ };
+
+ await Promise.all([client.maybeSync(2000), client.maybeSync(2000)]);
+
+ ok(
+ messages.includes("main/password-fields sync already running"),
+ "warning is shown about sync already running"
+ );
+ Utils.log.warn = backup;
+});
+add_task(clear_state);
+
+add_task(
+ async function test_sync_pulls_metadata_if_missing_with_dump_is_up_to_date() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+
+ let called;
+ clientWithDump._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ called = true;
+ return true;
+ },
+ };
+ // When dump is loaded, signature is not verified.
+ const records = await clientWithDump.get({ verifySignature: true });
+ ok(!!records.length, "dump is loaded");
+ ok(!called, "signature is missing but not verified");
+
+ // Synchronize the collection (local data is up-to-date).
+ // Signature verification is disabled (see `clear_state()`), so we don't bother with
+ // fetching metadata.
+ const uptodateTimestamp = await clientWithDump.db.getLastModified();
+ await clientWithDump.maybeSync(uptodateTimestamp);
+ let metadata = await clientWithDump.db.getMetadata();
+ ok(!metadata, "metadata was not fetched");
+
+ // Synchronize the collection again (still up-to-date, since the collection's last_modified is > 42)
+ clientWithDump.verifySignature = true;
+ await clientWithDump.maybeSync(42);
+
+ // With signature verification, metadata was fetched.
+ metadata = await clientWithDump.db.getMetadata();
+ ok(!!Object.keys(metadata).length, "metadata was fetched");
+ ok(called, "signature was verified for the data that was in dump");
+
+ // Metadata is present, so the signature will now be verified.
+ called = false;
+ await clientWithDump.get({ verifySignature: true });
+ ok(called, "local signature is verified");
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_sync_event_provides_information_about_records() {
+ let eventData;
+ client.on("sync", ({ data }) => (eventData = data));
+
+ await client.maybeSync(2000);
+ equal(eventData.current.length, 1);
+
+ await client.maybeSync(3001);
+ equal(eventData.current.length, 2);
+ equal(eventData.created.length, 1);
+ equal(eventData.created[0].website, "https://www.other.org/signin");
+ equal(eventData.updated.length, 1);
+ equal(eventData.updated[0].old.website, "https://some-website.com");
+ equal(eventData.updated[0].new.website, "https://some-website.com/login");
+ equal(eventData.deleted.length, 0);
+
+ await client.maybeSync(4001);
+ equal(eventData.current.length, 1);
+ equal(eventData.created.length, 0);
+ equal(eventData.updated.length, 0);
+ equal(eventData.deleted.length, 1);
+ equal(eventData.deleted[0].website, "https://www.other.org/signin");
+});
+add_task(clear_state);
+
+add_task(async function test_inspect_method() {
+ // Synchronize the `password-fields` collection in order to have
+ // some local data when .inspect() is called.
+ await client.maybeSync(2000);
+
+ const inspected = await RemoteSettings.inspect();
+
+ // Assertion for global attributes.
+ const {
+ mainBucket,
+ serverURL,
+ defaultSigner,
+ collections,
+ serverTimestamp,
+ } = inspected;
+ const rsSigner = "remote-settings.content-signature.mozilla.org";
+ equal(mainBucket, "main");
+ equal(serverURL, `http://localhost:${server.identity.primaryPort}/v1`);
+ equal(defaultSigner, rsSigner);
+ equal(serverTimestamp, '"5000"');
+
+ // A collection is listed in .inspect() if it has local data or if there
+ // is a JSON dump for it.
+ // "password-fields" has no dump but was synchronized above and thus has local data.
+ let col = collections.pop();
+ equal(col.collection, "password-fields");
+ equal(col.serverTimestamp, 3000);
+ equal(col.localTimestamp, 3000);
+
+ if (!IS_ANDROID) {
+ // "language-dictionaries" has a local dump (not on Android)
+ col = collections.pop();
+ equal(col.collection, "language-dictionaries");
+ equal(col.serverTimestamp, 4000);
+ ok(!col.localTimestamp); // not synchronized.
+ }
+});
+add_task(clear_state);
+
+add_task(async function test_clearAll_method() {
+ // Make sure we have some local data.
+ await client.maybeSync(2000);
+ await clientWithDump.maybeSync(2000);
+
+ await RemoteSettings.clearAll();
+
+ ok(!(await Utils.hasLocalData(client)), "Local data was deleted");
+ ok(!(await Utils.hasLocalData(clientWithDump)), "Local data was deleted");
+ ok(
+ !Services.prefs.prefHasUserValue(client.lastCheckTimePref),
+ "Pref was cleaned"
+ );
+
+ // Synchronization is not broken after resuming.
+ await client.maybeSync(2000);
+ await clientWithDump.maybeSync(2000);
+ ok(await Utils.hasLocalData(client), "Local data was populated");
+ ok(await Utils.hasLocalData(clientWithDump), "Local data was populated");
+});
+add_task(clear_state);
+
+add_task(async function test_listeners_are_not_deduplicated() {
+ let count = 0;
+ const plus1 = () => {
+ count += 1;
+ };
+
+ client.on("sync", plus1);
+ client.on("sync", plus1);
+ client.on("sync", plus1);
+
+ await client.maybeSync(2000);
+
+ equal(count, 3);
+});
+add_task(clear_state);
+
+add_task(async function test_listeners_can_be_removed() {
+ let count = 0;
+ const onSync = () => {
+ count += 1;
+ };
+
+ client.on("sync", onSync);
+ client.off("sync", onSync);
+
+ await client.maybeSync(2000);
+
+ equal(count, 0);
+});
+add_task(clear_state);
+
+add_task(async function test_all_listeners_are_executed_if_one_fails() {
+ let count = 0;
+ client.on("sync", () => {
+ count += 1;
+ });
+ client.on("sync", () => {
+ throw new Error("boom");
+ });
+ client.on("sync", () => {
+ count += 2;
+ });
+
+ let error;
+ try {
+ await client.maybeSync(2000);
+ } catch (e) {
+ error = e;
+ }
+
+ equal(count, 3);
+ equal(error.message, "boom");
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_up_to_date() {
+ await client.maybeSync(2000);
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ await client.maybeSync(3000);
+
+ // Only the up-to-date status was reported.
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.UP_TO_DATE]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_if_sync_succeeds() {
+ // We test each client because Telemetry requires preliminary declarations.
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ await client.maybeSync(2000);
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.SUCCESS]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(
+ async function test_synchronization_duration_is_reported_in_uptake_status() {
+ await client.maybeSync(2000);
+
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.SUCCESS,
+ {
+ source: client.identifier,
+ duration: v => v > 0,
+ trigger: "manual",
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_application_fails() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ client.on("sync", () => {
+ throw new Error("boom");
+ });
+
+ try {
+ await client.maybeSync(2000);
+ } catch (e) {}
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.APPLY_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_sync_fails() {
+ await client.db.importChanges({}, 9999);
+
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ try {
+ await client.maybeSync(10000);
+ } catch (e) {}
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.SERVER_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_parsing_fails() {
+ await client.db.importChanges({}, 10000);
+
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ try {
+ await client.maybeSync(10001);
+ } catch (e) {}
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.PARSE_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_if_fetching_signature_fails() {
+ await client.db.importChanges({}, 11000);
+
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ try {
+ await client.maybeSync(11001);
+ } catch (e) {}
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.SERVER_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_unknown_errors() {
+ const backup = client.db.list;
+ client.db.list = () => {
+ throw new Error("Internal");
+ };
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ try {
+ await client.maybeSync(2000);
+ } catch (e) {}
+
+ client.db.list = backup;
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.UNKNOWN_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_indexeddb_as_custom_1() {
+ const backup = client.db.getLastModified;
+ const msg =
+ "IndexedDB getLastModified() The operation failed for reasons unrelated to the database itself";
+ client.db.getLastModified = () => {
+ throw new Error(msg);
+ };
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+
+ try {
+ await client.maybeSync(2000);
+ } catch (e) {}
+
+ client.db.getLastModified = backup;
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ client.identifier
+ );
+ const expectedIncrements = { [UptakeTelemetry.STATUS.CUSTOM_1_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_telemetry_reports_error_name_as_event_nightly() {
+ const backup = client.db.list;
+ client.db.list = () => {
+ const e = new Error("Some unknown error");
+ e.name = "ThrownError";
+ throw e;
+ };
+
+ try {
+ await client.maybeSync(2000);
+ } catch (e) {}
+
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.UNKNOWN_ERROR,
+ {
+ source: client.identifier,
+ trigger: "manual",
+ duration: v => v >= 0,
+ errorName: "ThrownError",
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+
+ client.db.list = backup;
+});
+add_task(clear_state);
+
+add_task(async function test_bucketname_changes_when_preview_mode_is_enabled() {
+ equal(client.bucketName, "main");
+
+ RemoteSettings.enablePreviewMode(true);
+
+ equal(client.bucketName, "main-preview");
+});
+add_task(clear_state);
+
+add_task(
+ async function test_preview_mode_pref_affects_bucket_names_before_instantiated() {
+ Services.prefs.setBoolPref("services.settings.preview_enabled", true);
+
+ let clientWithDefaultBucket = RemoteSettings("other");
+ let clientWithBucket = RemoteSettings("coll", { bucketName: "buck" });
+
+ equal(clientWithDefaultBucket.bucketName, "main-preview");
+ equal(clientWithBucket.bucketName, "buck-preview");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_preview_enabled_pref_ignored_when_mode_is_set_explicitly() {
+ Services.prefs.setBoolPref("services.settings.preview_enabled", true);
+
+ let clientWithDefaultBucket = RemoteSettings("other");
+ let clientWithBucket = RemoteSettings("coll", { bucketName: "buck" });
+
+ equal(clientWithDefaultBucket.bucketName, "main-preview");
+ equal(clientWithBucket.bucketName, "buck-preview");
+
+ RemoteSettings.enablePreviewMode(false);
+
+ equal(clientWithDefaultBucket.bucketName, "main");
+ equal(clientWithBucket.bucketName, "buck");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_get_loads_default_records_from_a_local_dump_when_preview_mode_is_enabled() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ RemoteSettings.enablePreviewMode(true);
+ // When collection has a dump in services/settings/dumps/{bucket}/{collection}.json
+ const data = await clientWithDump.get();
+ notEqual(data.length, 0);
+ // No synchronization happened (responses are not mocked).
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_local_db_distinguishes_preview_records() {
+ RemoteSettings.enablePreviewMode(true);
+ client.db.importChanges({}, Date.now(), [{ id: "record-1" }], {
+ clear: true,
+ });
+
+ RemoteSettings.enablePreviewMode(false);
+ client.db.importChanges({}, Date.now(), [{ id: "record-2" }], {
+ clear: true,
+ });
+
+ deepEqual(await client.get(), [{ id: "record-2" }]);
+});
+add_task(clear_state);
+
+add_task(
+ async function test_inspect_changes_the_list_when_preview_mode_is_enabled() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest),
+ // and this test relies on the fact that clients are instantiated if a dump is packaged.
+ return;
+ }
+
+ // Register a client only listed in -preview...
+ RemoteSettings("crash-rate");
+
+ const {
+ collections: before,
+ previewMode: previewModeBefore,
+ } = await RemoteSettings.inspect();
+
+ Assert.ok(!previewModeBefore, "preview is not enabled");
+
+ // These two collections are listed in the main bucket in monitor/changes (one with dump, one registered).
+ deepEqual(before.map(c => c.collection).sort(), [
+ "language-dictionaries",
+ "password-fields",
+ ]);
+
+ // Switch to preview mode.
+ RemoteSettings.enablePreviewMode(true);
+
+ const {
+ collections: after,
+ mainBucket,
+ previewMode,
+ } = await RemoteSettings.inspect();
+
+ Assert.ok(previewMode, "preview is enabled");
+
+ // These two collections are listed in the main bucket in monitor/changes (both are registered).
+ deepEqual(after.map(c => c.collection).sort(), [
+ "crash-rate",
+ "password-fields",
+ ]);
+ equal(mainBucket, "main-preview");
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_sync_event_is_not_sent_from_get_when_no_dump() {
+ let called = false;
+ client.on("sync", e => {
+ called = true;
+ });
+
+ await client.get();
+
+ Assert.ok(!called, "sync event is not sent from .get()");
+});
+add_task(clear_state);
+
+add_task(async function test_get_can_be_called_from_sync_event_callback() {
+ let fromGet;
+ let fromEvent;
+
+ client.on("sync", async ({ data: { current } }) => {
+ // Before fixing Bug 1761953 this would result in a deadlock.
+ fromGet = await client.get();
+ fromEvent = current;
+ });
+
+ await client.maybeSync(2000);
+
+ Assert.ok(fromGet, "sync callback was called");
+ Assert.deepEqual(fromGet, fromEvent, ".get() gives current records list");
+});
+add_task(clear_state);
+
+add_task(async function test_attachments_are_pruned_when_sync_from_timer() {
+ await client.db.saveAttachment("bar", {
+ record: { id: "bar" },
+ blob: new Blob(["456"]),
+ });
+
+ await client.maybeSync(2000, { trigger: "broadcast" });
+
+ Assert.ok(
+ await client.attachments.cacheImpl.get("bar"),
+ "Extra attachment was not deleted on broadcast"
+ );
+
+ await client.maybeSync(3001, { trigger: "timer" });
+
+ Assert.ok(
+ !(await client.attachments.cacheImpl.get("bar")),
+ "Extra attachment was deleted on timer"
+ );
+});
+add_task(clear_state);
+
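+// Mock server plumbing: handleResponse() looks up a canned response in
+// getSampleResponse() below and replays its status, headers and body.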
+function handleResponse(request, response) {
+ try {
+ const sample = getSampleResponse(request, server.identity.primaryPort);
+ if (!sample) {
+ do_throw(
+ `unexpected ${request.method} request for ${request.path}?${request.queryString}`
+ );
+ }
+
+ response.setStatusLine(
+ null,
+ sample.status.status,
+ sample.status.statusText
+ );
+ // send the headers
+ for (let headerLine of sample.sampleHeaders) {
+ let headerElements = headerLine.split(":");
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+ response.setHeader("Date", new Date().toUTCString());
+
+ const body =
+ typeof sample.responseBody == "string"
+ ? sample.responseBody
+ : JSON.stringify(sample.responseBody);
+ response.write(body);
+ response.finish();
+ } catch (e) {
+ info(e);
+ }
+}
+
+function getSampleResponse(req, port) {
+ const responses = {
+ OPTIONS: {
+ sampleHeaders: [
+ "Access-Control-Allow-Headers: Content-Length,Expires,Backoff,Retry-After,Last-Modified,Total-Records,ETag,Pragma,Cache-Control,authorization,content-type,if-none-match,Alert,Next-Page",
+ "Access-Control-Allow-Methods: GET,HEAD,OPTIONS,POST,DELETE,OPTIONS",
+ "Access-Control-Allow-Origin: *",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: null,
+ },
+ "GET:/v1/": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ settings: {
+ batch_max_requests: 25,
+ },
+ url: `http://localhost:${port}/v1/`,
+ documentation: "https://kinto.readthedocs.org/",
+ version: "1.5.1",
+ commit: "cbc6f58",
+ hello: "kinto",
+ },
+ },
+ "GET:/v1/buckets/monitor/collections/changes/changeset": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ `Date: ${new Date().toUTCString()}`,
+ 'Etag: "5000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 5000,
+ changes: [
+ {
+ id: "4676f0c7-9757-4796-a0e8-b40a5a37a9c9",
+ bucket: "main",
+ collection: "unknown-locally",
+ last_modified: 5000,
+ },
+ {
+ id: "4676f0c7-9757-4796-a0e8-b40a5a37a9c9",
+ bucket: "main",
+ collection: "language-dictionaries",
+ last_modified: 4000,
+ },
+ {
+ id: "0af8da0b-3e03-48fb-8d0d-2d8e4cb7514d",
+ bucket: "main",
+ collection: "password-fields",
+ last_modified: 3000,
+ },
+ {
+ id: "4acda969-3bd3-4074-a678-ff311eeb076e",
+ bucket: "main-preview",
+ collection: "password-fields",
+ last_modified: 2000,
+ },
+ {
+ id: "58697bd1-315f-4185-9bee-3371befc2585",
+ bucket: "main-preview",
+ collection: "crash-rate",
+ last_modified: 1000,
+ },
+ ],
+ },
+ },
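+    // Truncated placeholder certificate chain, served for the fake x5u URL
+    // referenced by the mocked collection signatures.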
+ "GET:/fake-x5u": {
+ sampleHeaders: ["Content-Type: application/octet-stream"],
+ status: { status: 200, statusText: "OK" },
+ responseBody: `-----BEGIN CERTIFICATE-----
+MIIGYTCCBEmgAwIBAgIBATANBgkqhkiG9w0BAQwFADB9MQswCQYDVQQGEwJVU
+ZARKjbu1TuYQHf0fs+GwID8zeLc2zJL7UzcHFwwQ6Nda9OJN4uPAuC/BKaIpxCLL
+26b24/tRam4SJjqpiq20lynhUrmTtt6hbG3E1Hpy3bmkt2DYnuMFwEx2gfXNcnbT
+wNuvFqc=
+-----END CERTIFICATE-----`,
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=2000": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "3000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 3000,
+ metadata: {
+ id: "password-fields",
+ last_modified: 1234,
+ signature: {
+ signature: "abcdef",
+ x5u: `http://localhost:${port}/fake-x5u`,
+ },
+ },
+ changes: [
+ {
+ id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ last_modified: 3000,
+ website: "https://some-website.com",
+ selector: "#user[password]",
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=3001&_since=%223000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "4000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ metadata: {
+ signature: {},
+ },
+ timestamp: 4000,
+ changes: [
+ {
+ id: "aabad965-e556-ffe7-4191-074f5dee3df3",
+ last_modified: 4000,
+ website: "https://www.other.org/signin",
+ selector: "#signinpassword",
+ },
+ {
+ id: "9d500963-d80e-3a91-6e74-66f3811b99cc",
+ last_modified: 3500,
+ website: "https://some-website.com/login",
+ selector: "input#user[password]",
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=4001&_since=%224000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "5000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ metadata: {
+ signature: {},
+ },
+ timestamp: 5000,
+ changes: [
+ {
+ id: "aabad965-e556-ffe7-4191-074f5dee3df3",
+ deleted: true,
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=10000&_since=%229999%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 503, statusText: "Service Unavailable" },
+ responseBody: {
+ code: 503,
+ errno: 999,
+ error: "Service Unavailable",
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=10001&_since=%2210000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "10001"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: "<invalid json",
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=11001&_since=%2211000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 503, statusText: "Service Unavailable" },
+ responseBody: {
+ changes: [
+ {
+ id: "c4f021e3-f68c-4269-ad2a-d4ba87762b35",
+ last_modified: 4000,
+ website: "https://www.eff.org",
+ selector: "#pwd",
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields?_expected=11001": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 503, statusText: "Service Unavailable" },
+ responseBody: {
+ code: 503,
+ errno: 999,
+ error: "Service Unavailable",
+ },
+ },
+ "GET:/v1/buckets/monitor/collections/changes/changeset?collection=password-fields&bucket=main&_expected=0": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ `Date: ${new Date().toUTCString()}`,
+ 'Etag: "1338"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 1338,
+ changes: [
+ {
+ id: "fe5758d0-c67a-42d0-bb4f-8f2d75106b65",
+ bucket: "main",
+ collection: "password-fields",
+ last_modified: 1337,
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=1337": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "3000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ metadata: {
+ signature: {
+ signature: "some-sig",
+ x5u: `http://localhost:${port}/fake-x5u`,
+ },
+ },
+ timestamp: 3000,
+ changes: [
+ {
+ id: "312cc78d-9c1f-4291-a4fa-a1be56f6cc69",
+ last_modified: 3000,
+ website: "https://some-website.com",
+ selector: "#webpage[field-pwd]",
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/password-fields/changeset?_expected=1337&_since=%223000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "3001"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ metadata: {
+ signature: {
+ signature: "some-sig",
+ x5u: `http://localhost:${port}/fake-x5u`,
+ },
+ },
+ timestamp: 3001,
+ changes: [
+ {
+ id: "312cc78d-9c1f-4291-a4fa-a1be56f6cc69",
+ last_modified: 3001,
+ website: "https://some-website-2.com",
+ selector: "#webpage[field-pwd]",
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/language-dictionaries/changeset": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "5000000000000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 5000000000000,
+ metadata: {
+ id: "language-dictionaries",
+ last_modified: 1234,
+ signature: {
+ signature: "xyz",
+ x5u: `http://localhost:${port}/fake-x5u`,
+ },
+ },
+ changes: [
+ {
+ id: "xx",
+ last_modified: 5000000000000,
+ dictionaries: ["xx-XX@dictionaries.addons.mozilla.org"],
+ },
+ {
+ id: "fr",
+ last_modified: 5000000000000 - 1,
+ deleted: true,
+ },
+ {
+ id: "pt-BR",
+ last_modified: 5000000000000 - 2,
+ dictionaries: ["pt-BR@for-tests"],
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/with-local-fields/changeset?_expected=2000": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "2000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 2000,
+ metadata: {
+ id: "with-local-fields",
+ last_modified: 1234,
+ signature: {
+ signature: "xyz",
+ x5u: `http://localhost:${port}/fake-x5u`,
+ },
+ },
+ changes: [
+ {
+ id: "c74279ce-fb0a-42a6-ae11-386b567a6119",
+ last_modified: 2000,
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/main/collections/with-local-fields/changeset?_expected=3000&_since=%222000%22": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ 'Etag: "3000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ timestamp: 3000,
+ metadata: {
+ signature: {},
+ },
+ changes: [
+ {
+ id: "1f5c98b9-6d93-4c13-aa26-978b38695096",
+ last_modified: 3000,
+ },
+ ],
+ },
+ },
+ "GET:/v1/buckets/monitor/collections/changes/changeset?collection=no-mocked-responses&bucket=main&_expected=0": {
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ `Date: ${new Date().toUTCString()}`,
+ 'Etag: "713705"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: {
+ data: [
+ {
+ id: "07a98d1b-7c62-4344-ab18-76856b3facd8",
+ bucket: "main",
+ collection: "no-mocked-responses",
+ last_modified: 713705,
+ },
+ ],
+ },
+ },
+ };
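+  // Most specific key first: method + path + query string, then method + path,
+  // then method alone (e.g. OPTIONS preflights).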
+ return (
+ responses[`${req.method}:${req.path}?${req.queryString}`] ||
+ responses[`${req.method}:${req.path}`] ||
+ responses[req.method]
+ );
+}
diff --git a/services/settings/test/unit/test_remote_settings_dump_lastmodified.js b/services/settings/test/unit/test_remote_settings_dump_lastmodified.js
new file mode 100644
index 0000000000..875cef8b27
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_dump_lastmodified.js
@@ -0,0 +1,55 @@
+"use strict";
+
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+
+Cu.importGlobalProperties(["fetch"]);
+
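+// Reads the packaged JSON dump for the given bucket/collection directly from
+// the app resources and returns its timestamp, or -1 if no dump is packaged.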
+async function getLocalDumpLastModified(bucket, collection) {
+ let res;
+ try {
+ res = await fetch(
+ `resource://app/defaults/settings/${bucket}/${collection}.json`
+ );
+ } catch (e) {
+ return -1;
+ }
+ const { timestamp } = await res.json();
+ ok(timestamp >= 0, `${bucket}/${collection} dump has timestamp`);
+ return timestamp;
+}
+
+add_task(async function lastModified_of_non_existing_dump() {
+ ok(!Utils._dumpStats, "_dumpStats not initialized");
+ equal(
+ await Utils.getLocalDumpLastModified("did not", "exist"),
+ -1,
+ "A non-existent dump has value -1"
+ );
+ ok(Utils._dumpStats, "_dumpStats was initialized");
+
+ ok("did not/exist" in Utils._dumpStats, "cached non-existing dump result");
+ delete Utils._dumpStats["did not/exist"];
+});
+
+add_task(async function lastModified_summary_is_correct() {
+ ok(!!Object.keys(Utils._dumpStats).length, "Contains summary of dumps");
+
+ let checked = 0;
+ for (let [identifier, lastModified] of Object.entries(Utils._dumpStats)) {
+ let [bucket, collection] = identifier.split("/");
+ let actual = await getLocalDumpLastModified(bucket, collection);
+ if (actual < 0) {
+ info(`${identifier} has no dump, skip.`);
+ continue;
+ }
+ info(`Checking correctness of ${identifier}`);
+ equal(
+ await Utils.getLocalDumpLastModified(bucket, collection),
+ lastModified,
+ `Expected last_modified value for ${identifier}`
+ );
+ equal(lastModified, actual, `last_modified should match collection`);
+ checked++;
+ }
+ ok(checked > 0, "At least one dump was packaged and checked.");
+});
diff --git a/services/settings/test/unit/test_remote_settings_jexl_filters.js b/services/settings/test/unit/test_remote_settings_jexl_filters.js
new file mode 100644
index 0000000000..c08bfe0b79
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_jexl_filters.js
@@ -0,0 +1,216 @@
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+
+let client;
+
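+// Replace the local collection contents with the given records
+// (sequential ids are generated).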
+async function createRecords(records) {
+ await client.db.importChanges(
+ {},
+ 42,
+ records.map((record, i) => ({
+ id: `record-${i}`,
+ ...record,
+ })),
+ {
+ clear: true,
+ }
+ );
+}
+
+function run_test() {
+ client = RemoteSettings("some-key");
+
+ run_next_test();
+}
+
+add_task(async function test_returns_all_without_target() {
+ await createRecords([
+ {
+ passwordSelector: "#pass-signin",
+ },
+ {
+ filter_expression: null,
+ },
+ {
+ filter_expression: "",
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 3);
+});
+
+add_task(async function test_filters_can_be_disabled() {
+ const c = RemoteSettings("no-jexl", { filterFunc: null });
+ await c.db.importChanges({}, 42, [
+ {
+ id: "abc",
+ filter_expression: "1 == 2",
+ },
+ ]);
+
+ const list = await c.get();
+ equal(list.length, 1);
+});
+
+add_task(async function test_returns_entries_where_jexl_is_true() {
+ await createRecords([
+ {
+ willMatch: true,
+ filter_expression: "1",
+ },
+ {
+ willMatch: true,
+ filter_expression: "[42]",
+ },
+ {
+ willMatch: true,
+ filter_expression: "1 == 2 || 1 == 1",
+ },
+ {
+ willMatch: true,
+ filter_expression: 'env.appinfo.ID == "xpcshell@tests.mozilla.org"',
+ },
+ {
+ willMatch: false,
+ filter_expression: "env.version == undefined",
+ },
+ {
+ willMatch: true,
+ filter_expression: "env.unknown == undefined",
+ },
+ {
+ willMatch: false,
+ filter_expression: "1 == 2",
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 5);
+ ok(list.every(e => e.willMatch));
+});
+
+add_task(async function test_ignores_entries_where_jexl_is_invalid() {
+ await createRecords([
+ {
+ filter_expression: "true === true", // JavaScript Error: "Invalid expression token: ="
+ },
+ {
+ filter_expression: "Objects.keys({}) == []", // Token ( (openParen) unexpected in expression
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 0);
+});
+
+add_task(async function test_support_of_date_filters() {
+ await createRecords([
+ {
+ willMatch: true,
+ filter_expression: '"1982-05-08"|date < "2016-03-22"|date',
+ },
+ {
+ willMatch: false,
+ filter_expression: '"2000-01-01"|date < "1970-01-01"|date',
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 1);
+ ok(list.every(e => e.willMatch));
+});
+
+add_task(async function test_support_of_preferences_filters() {
+ await createRecords([
+ {
+ willMatch: true,
+ filter_expression: '"services.settings.last_etag"|preferenceValue == 42',
+ },
+ {
+ willMatch: true,
+ filter_expression:
+ '"services.settings.poll_interval"|preferenceExists == true',
+ },
+ {
+ willMatch: true,
+ filter_expression:
+ '"services.settings.poll_interval"|preferenceIsUserSet == false',
+ },
+ {
+ willMatch: true,
+ filter_expression:
+ '"services.settings.last_etag"|preferenceIsUserSet == true',
+ },
+ ]);
+
+ // Set a pref for the user.
+ Services.prefs.setIntPref("services.settings.last_etag", 42);
+
+ const list = await client.get();
+ equal(list.length, 4);
+ ok(list.every(e => e.willMatch));
+});
+
+add_task(async function test_support_of_intersect_operator() {
+ await createRecords([
+ {
+ willMatch: true,
+ filter_expression: '{foo: 1, bar: 2}|keys intersect ["foo"]',
+ },
+ {
+ willMatch: true,
+ filter_expression: '(["a", "b"] intersect ["a", 1, 4]) == "a"',
+ },
+ {
+ willMatch: false,
+ filter_expression: '(["a", "b"] intersect [3, 1, 4]) == "c"',
+ },
+ {
+ willMatch: true,
+ filter_expression: `
+ [1, 2, 3]
+ intersect
+ [3, 4, 5]
+ `,
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 3);
+ ok(list.every(e => e.willMatch));
+});
+
+add_task(async function test_support_of_samples() {
+ await createRecords([
+ {
+ willMatch: true,
+ filter_expression: '"always-true"|stableSample(1)',
+ },
+ {
+ willMatch: false,
+ filter_expression: '"always-false"|stableSample(0)',
+ },
+ {
+ willMatch: true,
+ filter_expression: '"turns-to-true-0"|stableSample(0.5)',
+ },
+ {
+ willMatch: false,
+ filter_expression: '"turns-to-false-1"|stableSample(0.5)',
+ },
+ {
+ willMatch: true,
+ filter_expression: '"turns-to-true-0"|bucketSample(0, 50, 100)',
+ },
+ {
+ willMatch: false,
+ filter_expression: '"turns-to-false-1"|bucketSample(0, 50, 100)',
+ },
+ ]);
+
+ const list = await client.get();
+ equal(list.length, 3);
+ ok(list.every(e => e.willMatch));
+});
diff --git a/services/settings/test/unit/test_remote_settings_offline.js b/services/settings/test/unit/test_remote_settings_offline.js
new file mode 100644
index 0000000000..2dbb76642a
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_offline.js
@@ -0,0 +1,141 @@
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+const { RemoteSettingsWorker } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsWorker.jsm"
+);
+const { SharedUtils } = ChromeUtils.import(
+ "resource://services-settings/SharedUtils.jsm"
+);
+
+// A collection with a dump that's packaged on all builds where this test runs,
+// including on Android at mobile/android/installer/package-manifest.in
+const TEST_BUCKET = "main";
+const TEST_COLLECTION = "password-recipes";
+
+let client;
+let DUMP_RECORDS;
+let DUMP_LAST_MODIFIED;
+
+add_task(async function setup() {
+ // "services.settings.server" pref is not set.
+ // Test defaults to an unreachable server,
+ // and will only load from the dump if any.
+
+ client = new RemoteSettingsClient(TEST_COLLECTION, {
+ bucketName: TEST_BUCKET,
+ });
+
+ const dump = await SharedUtils.loadJSONDump(TEST_BUCKET, TEST_COLLECTION);
+ DUMP_RECORDS = dump.data;
+ DUMP_LAST_MODIFIED = dump.timestamp;
+
+ // Dumps are fetched via the following, which sorts the records, newest first.
+ // https://searchfox.org/mozilla-central/rev/5b3444ad300e244b5af4214212e22bd9e4b7088a/taskcluster/docker/periodic-updates/scripts/periodic_file_updates.sh#304
+ equal(
+ DUMP_LAST_MODIFIED,
+ DUMP_RECORDS[0].last_modified,
+ "records in dump ought to be sorted by last_modified"
+ );
+});
+
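+// Write records straight into the client's local database through the
+// worker's test-only import, bypassing synchronization.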
+async function importData(records) {
+ await RemoteSettingsWorker._execute("_test_only_import", [
+ TEST_BUCKET,
+ TEST_COLLECTION,
+ records,
+ records[0]?.last_modified || 0,
+ ]);
+}
+
+async function clear_state() {
+ await client.db.clear();
+}
+
+add_task(async function test_load_from_dump_when_offline() {
+ // Baseline: verify that the collection is empty at first,
+ // but non-empty after loading from the dump.
+ const before = await client.get({ syncIfEmpty: false });
+ equal(before.length, 0, "collection empty when offline");
+
+ // should import from dump since collection was not initialized.
+ const after = await client.get();
+ equal(after.length, DUMP_RECORDS.length, "collection loaded from dump");
+ equal(await client.getLastModified(), DUMP_LAST_MODIFIED, "dump's timestamp");
+});
+add_task(clear_state);
+
+add_task(async function test_optional_skip_dump_after_empty_import() {
+ // clear_state should have wiped the database.
+ const before = await client.get({ syncIfEmpty: false });
+ equal(before.length, 0, "collection empty after clearing");
+
+ // Verify that the dump is not imported again by client.get()
+ // when the database is initialized with an empty dump
+ // with `loadDumpIfNewer` disabled.
+ await importData([]); // <-- Empty set of records.
+
+ const after = await client.get({ loadDumpIfNewer: false });
+ equal(after.length, 0, "collection still empty due to import");
+ equal(await client.getLastModified(), 0, "Empty dump has no timestamp");
+});
+add_task(clear_state);
+
+add_task(async function test_optional_skip_dump_after_non_empty_import() {
+ await importData([{ last_modified: 1234, id: "dummy" }]);
+
+ const after = await client.get({ loadDumpIfNewer: false });
+ equal(after.length, 1, "Imported dummy data");
+ equal(await client.getLastModified(), 1234, "Expected timestamp of import");
+
+ await importData([]);
+ const after2 = await client.get({ loadDumpIfNewer: false });
+  equal(after2.length, 0, "Previous data wiped on subsequent empty import");
+ equal(await client.getLastModified(), 0, "Timestamp of empty collection");
+});
+add_task(clear_state);
+
+add_task(async function test_load_dump_after_empty_import() {
+ await importData([]); // <-- Empty set of records, i.e. last_modified = 0.
+
+ const after = await client.get();
+ equal(after.length, DUMP_RECORDS.length, "Imported dump");
+ equal(await client.getLastModified(), DUMP_LAST_MODIFIED, "dump's timestamp");
+});
+add_task(clear_state);
+
+add_task(async function test_load_dump_after_non_empty_import() {
+ // Dump is updated regularly, verify that the dump matches our expectations
+ // before running the test.
+ ok(DUMP_LAST_MODIFIED > 1234, "Assuming dump to be newer than dummy 1234");
+
+ await importData([{ last_modified: 1234, id: "dummy" }]);
+
+ const after = await client.get();
+ equal(after.length, DUMP_RECORDS.length, "Imported dump");
+ equal(await client.getLastModified(), DUMP_LAST_MODIFIED, "dump's timestamp");
+});
+add_task(clear_state);
+
+add_task(async function test_load_dump_after_import_from_broken_distro() {
+ // Dump is updated regularly, verify that the dump matches our expectations
+ // before running the test.
+ ok(DUMP_LAST_MODIFIED > 1234, "Assuming dump to be newer than dummy 1234");
+
+ // No last_modified time.
+ await importData([{ id: "dummy" }]);
+
+ const after = await client.get();
+ equal(after.length, DUMP_RECORDS.length, "Imported dump");
+ equal(await client.getLastModified(), DUMP_LAST_MODIFIED, "dump's timestamp");
+});
+add_task(clear_state);
+
+add_task(async function test_skip_dump_if_same_last_modified() {
+ await importData([{ last_modified: DUMP_LAST_MODIFIED, id: "dummy" }]);
+
+ const after = await client.get();
+ equal(after.length, 1, "Not importing dump when time matches");
+ equal(await client.getLastModified(), DUMP_LAST_MODIFIED, "Same timestamp");
+});
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_remote_settings_poll.js b/services/settings/test/unit/test_remote_settings_poll.js
new file mode 100644
index 0000000000..b1d374c1c2
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_poll.js
@@ -0,0 +1,1385 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+const { setTimeout } = ChromeUtils.importESModule(
+ "resource://gre/modules/Timer.sys.mjs"
+);
+
+const { UptakeTelemetry, Policy } = ChromeUtils.import(
+ "resource://services-common/uptake-telemetry.js"
+);
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+const { pushBroadcastService } = ChromeUtils.import(
+ "resource://gre/modules/PushBroadcastService.jsm"
+);
+const { SyncHistory } = ChromeUtils.import(
+ "resource://services-settings/SyncHistory.jsm"
+);
+const {
+ RemoteSettings,
+ remoteSettingsBroadcastHandler,
+ BROADCAST_ID,
+} = ChromeUtils.import("resource://services-settings/remote-settings.js");
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+const { TelemetryTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TelemetryTestUtils.sys.mjs"
+);
+
+const IS_ANDROID = AppConstants.platform == "android";
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const PREF_SETTINGS_SERVER_BACKOFF = "services.settings.server.backoff";
+const PREF_LAST_UPDATE = "services.settings.last_update_seconds";
+const PREF_LAST_ETAG = "services.settings.last_etag";
+const PREF_CLOCK_SKEW_SECONDS = "services.settings.clock_skew_seconds";
+
+// Telemetry report result.
+const TELEMETRY_COMPONENT = "remotesettings";
+const TELEMETRY_SOURCE_POLL = "settings-changes-monitoring";
+const TELEMETRY_SOURCE_SYNC = "settings-sync";
+const CHANGES_PATH = "/v1" + Utils.CHANGES_PATH;
+
+var server;
+
+async function clear_state() {
+ // set up prefs so the kinto updater talks to the test server
+ Services.prefs.setCharPref(
+ PREF_SETTINGS_SERVER,
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ // set some initial values so we can check these are updated appropriately
+ Services.prefs.setIntPref(PREF_LAST_UPDATE, 0);
+ Services.prefs.setIntPref(PREF_CLOCK_SKEW_SECONDS, 0);
+ Services.prefs.clearUserPref(PREF_LAST_ETAG);
+
+ // Clear events snapshot.
+ TelemetryTestUtils.assertEvents([], {}, { process: "dummy" });
+
+ // Clear sync history.
+ await new SyncHistory("").clear();
+}
+
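+// Build a path handler for the monitor/changes endpoint. The response timestamp
+// and ETag are taken from the first (newest) entry's last_modified, if any.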
+function serveChangesEntries(serverTime, entriesOrFunc) {
+ return (request, response) => {
+ response.setStatusLine(null, 200, "OK");
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date(serverTime).toUTCString());
+ const entries =
+ typeof entriesOrFunc == "function" ? entriesOrFunc() : entriesOrFunc;
+ const latest = entries[0]?.last_modified ?? 42;
+ if (entries.length) {
+ response.setHeader("ETag", `"${latest}"`);
+ }
+ response.write(JSON.stringify({ timestamp: latest, changes: entries }));
+ };
+}
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ // Pretend we are in nightly channel to make sure all telemetry events are sent.
+ let oldGetChannel = Policy.getChannel;
+ Policy.getChannel = () => "nightly";
+
+ run_next_test();
+
+ registerCleanupFunction(() => {
+ Policy.getChannel = oldGetChannel;
+ server.stop(() => {});
+ });
+}
+
+add_task(clear_state);
+
+add_task(async function test_an_event_is_sent_on_start() {
+ server.registerPathHandler(CHANGES_PATH, (request, response) => {
+ response.write(JSON.stringify({ timestamp: 42, changes: [] }));
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("ETag", '"42"');
+ response.setHeader("Date", new Date().toUTCString());
+ response.setStatusLine(null, 200, "OK");
+ });
+ let notificationObserved = null;
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, "remote-settings:changes-poll-start");
+ notificationObserved = JSON.parse(aData);
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:changes-poll-start");
+
+ await RemoteSettings.pollChanges({ expectedTimestamp: 13 });
+
+ Assert.equal(
+ notificationObserved.expectedTimestamp,
+ 13,
+ "start notification should have been observed"
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_offline_is_reported_if_relevant() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const offlineBackup = Services.io.offline;
+ try {
+ Services.io.offline = true;
+
+ await RemoteSettings.pollChanges();
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.NETWORK_OFFLINE_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+ } finally {
+ Services.io.offline = offlineBackup;
+ }
+});
+add_task(clear_state);
+
+add_task(async function test_check_success() {
+ const serverTime = 8000;
+
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(serverTime, [
+ {
+ id: "330a0c5f-fadf-ff0b-40c8-4eb0d924ff6a",
+ last_modified: 1100,
+ host: "localhost",
+ bucket: "some-other-bucket",
+ collection: "test-collection",
+ },
+ {
+ id: "254cbb9e-6888-4d9f-8e60-58b74faa8778",
+ last_modified: 1000,
+ host: "localhost",
+ bucket: "test-bucket",
+ collection: "test-collection",
+ },
+ ])
+ );
+
+ // add a test kinto client that will respond to lastModified information
+ // for a collection called 'test-collection'.
+ // Let's use a bucket that is not the default one (`test-bucket`).
+ const c = RemoteSettings("test-collection", {
+ bucketName: "test-bucket",
+ });
+ let maybeSyncCalled = false;
+ c.maybeSync = () => {
+ maybeSyncCalled = true;
+ };
+
+ // Ensure that the remote-settings:changes-poll-end notification works
+ let notificationObserved = false;
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
+ notificationObserved = true;
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:changes-poll-end");
+
+ await RemoteSettings.pollChanges();
+
+  // Since the poll did not fail, the unknown collection ``some-other-bucket/test-collection``
+  // must have been ignored; otherwise the client would have tried to reach the network.
+
+ Assert.ok(maybeSyncCalled, "maybeSync was called");
+ Assert.ok(notificationObserved, "a notification should have been observed");
+ // Last timestamp was saved. An ETag header value is a quoted string.
+ Assert.equal(Services.prefs.getCharPref(PREF_LAST_ETAG), '"1100"');
+ // check the last_update is updated
+ Assert.equal(Services.prefs.getIntPref(PREF_LAST_UPDATE), serverTime / 1000);
+
+ // ensure that we've accumulated the correct telemetry
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.SUCCESS,
+ {
+ source: TELEMETRY_SOURCE_POLL,
+ trigger: "manual",
+ },
+ ],
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.SUCCESS,
+ {
+ source: TELEMETRY_SOURCE_SYNC,
+ trigger: "manual",
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+});
+add_task(clear_state);
+
+add_task(async function test_update_timer_interface() {
+ const remoteSettings = Cc["@mozilla.org/services/settings;1"].getService(
+ Ci.nsITimerCallback
+ );
+
+ const serverTime = 8000;
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(serverTime, [
+ {
+ id: "028261ad-16d4-40c2-a96a-66f72914d125",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "whatever-collection",
+ },
+ ])
+ );
+
+ await new Promise(resolve => {
+ const e = "remote-settings:changes-poll-end";
+ const changesPolledObserver = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, e);
+ resolve();
+ },
+ };
+ Services.obs.addObserver(changesPolledObserver, e);
+ remoteSettings.notify(null);
+ });
+
+ // Everything went fine.
+ Assert.equal(Services.prefs.getCharPref(PREF_LAST_ETAG), '"42"');
+ Assert.equal(Services.prefs.getIntPref(PREF_LAST_UPDATE), serverTime / 1000);
+});
+add_task(clear_state);
+
+add_task(async function test_check_up_to_date() {
+ // Simulate a poll with up-to-date collection.
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ const serverTime = 4000;
+ server.registerPathHandler(CHANGES_PATH, serveChangesEntries(serverTime, []));
+
+ Services.prefs.setCharPref(PREF_LAST_ETAG, '"1100"');
+
+ // Ensure that the remote-settings:changes-poll-end notification is sent.
+ let notificationObserved = false;
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
+ notificationObserved = true;
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:changes-poll-end");
+
+ // If server has no change, maybeSync() is not called.
+ let maybeSyncCalled = false;
+ const c = RemoteSettings("test-collection", {
+ bucketName: "test-bucket",
+ });
+ c.maybeSync = () => {
+ maybeSyncCalled = true;
+ };
+
+ await RemoteSettings.pollChanges();
+
+ Assert.ok(notificationObserved, "a notification should have been observed");
+ Assert.ok(!maybeSyncCalled, "maybeSync should not be called");
+ // Last update is overwritten
+ Assert.equal(Services.prefs.getIntPref(PREF_LAST_UPDATE), serverTime / 1000);
+
+ // ensure that we've accumulated the correct telemetry
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.UP_TO_DATE]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_expected_timestamp() {
+ function withCacheBust(request, response) {
+ const entries = [
+ {
+ id: "695c2407-de79-4408-91c7-70720dd59d78",
+ last_modified: 1100,
+ host: "localhost",
+ bucket: "main",
+ collection: "with-cache-busting",
+ },
+ ];
+ if (
+ request.queryString.includes(`_expected=${encodeURIComponent('"42"')}`)
+ ) {
+ response.write(
+ JSON.stringify({
+ timestamp: 1110,
+ changes: entries,
+ })
+ );
+ }
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("ETag", '"1100"');
+ response.setHeader("Date", new Date().toUTCString());
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, withCacheBust);
+
+ const c = RemoteSettings("with-cache-busting");
+ let maybeSyncCalled = false;
+ c.maybeSync = () => {
+ maybeSyncCalled = true;
+ };
+
+ await RemoteSettings.pollChanges({ expectedTimestamp: '"42"' });
+
+ Assert.ok(maybeSyncCalled, "maybeSync was called");
+});
+add_task(clear_state);
+
+add_task(async function test_client_last_check_is_saved() {
+ server.registerPathHandler(CHANGES_PATH, (request, response) => {
+ response.write(
+ JSON.stringify({
+ timestamp: 42,
+ changes: [
+ {
+ id: "695c2407-de79-4408-91c7-70720dd59d78",
+ last_modified: 1100,
+ host: "localhost",
+ bucket: "main",
+ collection: "models-recipes",
+ },
+ ],
+ })
+ );
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("ETag", '"42"');
+ response.setHeader("Date", new Date().toUTCString());
+ response.setStatusLine(null, 200, "OK");
+ });
+
+ const c = RemoteSettings("models-recipes");
+ c.maybeSync = () => {};
+
+ equal(
+ c.lastCheckTimePref,
+ "services.settings.main.models-recipes.last_check"
+ );
+ Services.prefs.setIntPref(c.lastCheckTimePref, 0);
+
+ await RemoteSettings.pollChanges({ expectedTimestamp: '"42"' });
+
+ notEqual(Services.prefs.getIntPref(c.lastCheckTimePref), 0);
+});
+add_task(clear_state);
+
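+// Only consider uptake telemetry events in the assertEvents() calls below.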
+const TELEMETRY_EVENTS_FILTERS = {
+ category: "uptake.remotecontent.result",
+ method: "uptake",
+};
+add_task(async function test_age_of_data_is_reported_in_uptake_status() {
+ const serverTime = 1552323900000;
+ const recordsTimestamp = serverTime - 3600 * 1000;
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(serverTime, [
+ {
+ id: "b6ba7fab-a40a-4d03-a4af-6b627f3c5b36",
+ last_modified: recordsTimestamp,
+ host: "localhost",
+ bucket: "main",
+ collection: "some-entry",
+ },
+ ])
+ );
+
+ await RemoteSettings.pollChanges();
+
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.SUCCESS,
+ {
+ source: TELEMETRY_SOURCE_POLL,
+ age: "3600",
+ trigger: "manual",
+ },
+ ],
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.SUCCESS,
+ {
+ source: TELEMETRY_SOURCE_SYNC,
+ duration: () => true,
+ trigger: "manual",
+ timestamp: `"${recordsTimestamp}"`,
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+});
+add_task(clear_state);
+
+add_task(
+ async function test_synchronization_duration_is_reported_in_uptake_status() {
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, [
+ {
+ id: "b6ba7fab-a40a-4d03-a4af-6b627f3c5b36",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "some-entry",
+ },
+ ])
+ );
+ const c = RemoteSettings("some-entry");
+ // Simulate a synchronization that lasts 1 sec.
+ // eslint-disable-next-line mozilla/no-arbitrary-setTimeout
+ c.maybeSync = () => new Promise(resolve => setTimeout(resolve, 1000));
+
+ await RemoteSettings.pollChanges();
+
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ "success",
+ {
+ source: TELEMETRY_SOURCE_POLL,
+ age: () => true,
+ trigger: "manual",
+ },
+ ],
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ "success",
+ {
+ source: TELEMETRY_SOURCE_SYNC,
+ duration: v => v >= 1000,
+ trigger: "manual",
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_success_with_partial_list() {
+ function partialList(request, response) {
+ const entries = [
+ {
+ id: "028261ad-16d4-40c2-a96a-66f72914d125",
+ last_modified: 43,
+ host: "localhost",
+ bucket: "main",
+ collection: "cid-1",
+ },
+ {
+ id: "98a34576-bcd6-423f-abc2-1d290b776ed8",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "poll-test-collection",
+ },
+ ];
+ if (request.queryString.includes(`_since=${encodeURIComponent('"42"')}`)) {
+ response.write(
+ JSON.stringify({
+ timestamp: 43,
+ changes: entries.slice(0, 1),
+ })
+ );
+ } else {
+ response.write(
+ JSON.stringify({
+ timestamp: 42,
+ changes: entries,
+ })
+ );
+ }
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date().toUTCString());
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, partialList);
+
+ const c = RemoteSettings("poll-test-collection");
+ let maybeSyncCount = 0;
+ c.maybeSync = () => {
+ maybeSyncCount++;
+ };
+
+ await RemoteSettings.pollChanges();
+ await RemoteSettings.pollChanges();
+
+ // On the second call, the server does not mention the poll-test-collection
+ // and maybeSync() is not called.
+ Assert.equal(maybeSyncCount, 1, "maybeSync should not be called twice");
+});
+add_task(clear_state);
+
+add_task(async function test_full_polling() {
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, [
+ {
+ id: "b6ba7fab-a40a-4d03-a4af-6b627f3c5b36",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "poll-test-collection",
+ },
+ ])
+ );
+
+ const c = RemoteSettings("poll-test-collection");
+ let maybeSyncCount = 0;
+ c.maybeSync = () => {
+ maybeSyncCount++;
+ };
+
+ await RemoteSettings.pollChanges();
+ await RemoteSettings.pollChanges({ full: true });
+
+  // Since the second call is a full poll, clients are called again.
+ Assert.equal(maybeSyncCount, 2, "maybeSync should be called twice");
+});
+add_task(clear_state);
+
+add_task(async function test_server_bad_json() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ function simulateBadJSON(request, response) {
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.write("<html></html>");
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateBadJSON);
+
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+ Assert.ok(/JSON.parse: unexpected character/.test(error.message));
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.PARSE_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_server_bad_content_type() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ function simulateBadContentType(request, response) {
+ response.setHeader("Content-Type", "text/html");
+ response.write("<html></html>");
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateBadContentType);
+
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+ Assert.ok(/Unexpected content-type/.test(error.message));
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.CONTENT_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_server_404_response() {
+ function simulateDummy404(request, response) {
+ response.setHeader("Content-Type", "text/html; charset=UTF-8");
+ response.write("<html></html>");
+ response.setStatusLine(null, 404, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateDummy404);
+
+ await RemoteSettings.pollChanges(); // Does not fail when running from tests.
+});
+add_task(clear_state);
+
+add_task(async function test_server_error() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ // Simulate a server error.
+ function simulateErrorResponse(request, response) {
+ response.setHeader("Date", new Date(3000).toUTCString());
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.write(
+ JSON.stringify({
+ code: 503,
+ errno: 999,
+ error: "Service Unavailable",
+ })
+ );
+ response.setStatusLine(null, 503, "Service Unavailable");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateErrorResponse);
+
+ let notificationObserved = false;
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, "remote-settings:changes-poll-end");
+ notificationObserved = true;
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:changes-poll-end");
+ Services.prefs.setIntPref(PREF_LAST_UPDATE, 42);
+
+ // pollChanges() fails with adequate error and no notification.
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.ok(
+ !notificationObserved,
+ "a notification should not have been observed"
+ );
+ Assert.ok(/Polling for changes failed/.test(error.message));
+ // When an error occurs, last update was not overwritten.
+ Assert.equal(Services.prefs.getIntPref(PREF_LAST_UPDATE), 42);
+ // ensure that we've accumulated the correct telemetry
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.SERVER_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_server_error_5xx() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ function simulateErrorResponse(request, response) {
+ response.setHeader("Date", new Date(3000).toUTCString());
+ response.setHeader("Content-Type", "text/html; charset=UTF-8");
+ response.write("<html></html>");
+ response.setStatusLine(null, 504, "Gateway Timeout");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateErrorResponse);
+
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {}
+
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.SERVER_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_server_error_4xx() {
+ function simulateErrorResponse(request, response) {
+ response.setHeader("Date", new Date(3000).toUTCString());
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ if (request.queryString.includes(`_since=${encodeURIComponent('"abc"')}`)) {
+ response.setStatusLine(null, 400, "Bad Request");
+ response.write(JSON.stringify({}));
+ } else {
+ response.setStatusLine(null, 200, "OK");
+ response.write(JSON.stringify({ changes: [] }));
+ }
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateErrorResponse);
+
+ Services.prefs.setCharPref(PREF_LAST_ETAG, '"abc"');
+
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.ok(error.message.includes("400 Bad Request"), "Polling failed");
+ Assert.ok(
+ !Services.prefs.prefHasUserValue(PREF_LAST_ETAG),
+ "Last ETag pref was cleared"
+ );
+
+ await RemoteSettings.pollChanges(); // Does not raise.
+});
+add_task(clear_state);
+
+add_task(async function test_client_error() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_SYNC
+ );
+
+ const collectionDetails = {
+ id: "b6ba7fab-a40a-4d03-a4af-6b627f3c5b36",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "some-entry",
+ };
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, [collectionDetails])
+ );
+ const c = RemoteSettings("some-entry");
+ c.maybeSync = () => {
+ throw new RemoteSettingsClient.CorruptedDataError("main/some-entry");
+ };
+
+ let notificationsObserved = [];
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ Services.obs.removeObserver(this, aTopic);
+ notificationsObserved.push([aTopic, aSubject.wrappedJSObject]);
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:changes-poll-end");
+ Services.obs.addObserver(observer, "remote-settings:sync-error");
+ Services.prefs.setIntPref(PREF_LAST_ETAG, 42);
+
+ // pollChanges() fails with adequate error and a sync-error notification.
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+
+ Assert.equal(
+ notificationsObserved.length,
+ 1,
+ "only the error notification should not have been observed"
+ );
+ console.log(notificationsObserved);
+ let [topicObserved, subjectObserved] = notificationsObserved[0];
+ Assert.equal(topicObserved, "remote-settings:sync-error");
+ Assert.ok(
+ subjectObserved.error instanceof RemoteSettingsClient.CorruptedDataError,
+ `original error is provided (got ${subjectObserved.error})`
+ );
+ Assert.deepEqual(
+ subjectObserved.error.details,
+ collectionDetails,
+ "information about collection is provided"
+ );
+
+ Assert.ok(/Corrupted/.test(error.message), "original client error is thrown");
+ // When an error occurs, last etag was not overwritten.
+ Assert.equal(Services.prefs.getIntPref(PREF_LAST_ETAG), 42);
+ // ensure that we've accumulated the correct telemetry
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_SYNC
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.SYNC_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_sync_success_is_stored_in_history() {
+ const collectionDetails = {
+ last_modified: 444,
+ bucket: "main",
+ collection: "desktop-manager",
+ };
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, [collectionDetails])
+ );
+ const c = RemoteSettings("desktop-manager");
+ c.maybeSync = () => {};
+ try {
+ await RemoteSettings.pollChanges({ expectedTimestamp: 555 });
+ } catch (e) {}
+
+ const { history } = await RemoteSettings.inspect();
+
+ Assert.deepEqual(history, {
+ [TELEMETRY_SOURCE_SYNC]: [
+ {
+ timestamp: 444,
+ status: "success",
+ infos: {},
+ datetime: new Date(444),
+ },
+ ],
+ });
+});
+add_task(clear_state);
+
+add_task(async function test_sync_error_is_stored_in_history() {
+ const collectionDetails = {
+ last_modified: 1337,
+ bucket: "main",
+ collection: "desktop-manager",
+ };
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, [collectionDetails])
+ );
+ const c = RemoteSettings("desktop-manager");
+ c.maybeSync = () => {
+ throw new RemoteSettingsClient.MissingSignatureError(
+ "main/desktop-manager"
+ );
+ };
+ try {
+ await RemoteSettings.pollChanges({ expectedTimestamp: 123456 });
+ } catch (e) {}
+
+ const { history } = await RemoteSettings.inspect();
+
+ Assert.deepEqual(history, {
+ [TELEMETRY_SOURCE_SYNC]: [
+ {
+ timestamp: 1337,
+ status: "sync_error",
+ infos: {
+ expectedTimestamp: 123456,
+ errorName: "MissingSignatureError",
+ },
+ datetime: new Date(1337),
+ },
+ ],
+ });
+});
+add_task(clear_state);
+
+add_task(
+ async function test_sync_broken_signal_is_sent_on_consistent_failure() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ // Wait for the "sync-broken-error" notification.
+ let notificationObserved = false;
+ const observer = {
+ observe(aSubject, aTopic, aData) {
+ notificationObserved = true;
+ },
+ };
+ Services.obs.addObserver(observer, "remote-settings:broken-sync-error");
+ // Register a client with a failing sync method.
+ const c = RemoteSettings("desktop-manager");
+ c.maybeSync = () => {
+ throw new RemoteSettingsClient.InvalidSignatureError(
+ "main/desktop-manager"
+ );
+ };
+ // Simulate a response whose ETag gets incremented on each call
+ // (in order to generate several history entries, indexed by timestamp).
+ let timestamp = 1337;
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(10000, () => {
+ return [
+ {
+ last_modified: ++timestamp,
+ bucket: "main",
+ collection: "desktop-manager",
+ },
+ ];
+ })
+ );
+
+ // Now obtain several failures in a row (less than threshold).
+ for (var i = 0; i < 9; i++) {
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {}
+ }
+ Assert.ok(!notificationObserved, "Not notified yet");
+
+ // Fail again once. Will now notify.
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {}
+ Assert.ok(notificationObserved, "Broken sync notified");
+ // Uptake event to notify broken sync is sent.
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_SYNC
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.SYNC_ERROR]: 10,
+ [UptakeTelemetry.STATUS.SYNC_BROKEN_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+
+ // Synchronize successfully.
+ notificationObserved = false;
+ const failingSync = c.maybeSync;
+ c.maybeSync = () => {};
+ await RemoteSettings.pollChanges();
+
+ const { history } = await RemoteSettings.inspect();
+ Assert.equal(
+ history[TELEMETRY_SOURCE_SYNC][0].status,
+ UptakeTelemetry.STATUS.SUCCESS,
+ "Last sync is success"
+ );
+ Assert.ok(!notificationObserved, "Not notified after success");
+
+ // Now fail again. Broken sync isn't notified; several failures in a row are needed.
+ c.maybeSync = failingSync;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {}
+ Assert.ok(!notificationObserved, "Not notified on single error");
+ Services.obs.removeObserver(observer, "remote-settings:broken-sync-error");
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_check_clockskew_is_updated() {
+ const serverTime = 2000;
+
+ function serverResponse(request, response) {
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date(serverTime).toUTCString());
+ response.write(JSON.stringify({ timestamp: 42, changes: [] }));
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, serverResponse);
+
+ let startTime = Date.now();
+
+ await RemoteSettings.pollChanges();
+
+ // How does the clock difference look?
+ let endTime = Date.now();
+ let clockDifference = Services.prefs.getIntPref(PREF_CLOCK_SKEW_SECONDS);
+ // We previously set serverTime to 2000 ms (2 seconds past epoch).
+ Assert.ok(
+ clockDifference <= endTime / 1000 &&
+ clockDifference >= Math.floor(startTime / 1000) - serverTime / 1000
+ );
+
+ // check negative clock skew times
+ // set to a time in the future
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(Date.now() + 10000, [])
+ );
+
+ await RemoteSettings.pollChanges();
+
+ clockDifference = Services.prefs.getIntPref(PREF_CLOCK_SKEW_SECONDS);
+ // We previously set the server time to Date.now() + 10000 ms (10 seconds in the future).
+ Assert.ok(clockDifference <= 0 && clockDifference >= -10);
+});
+add_task(clear_state);
+
+add_task(async function test_check_clockskew_takes_age_into_account() {
+ const currentTime = Date.now();
+ const skewSeconds = 5;
+ const ageCDNSeconds = 3600;
+ const serverTime = currentTime - skewSeconds * 1000 - ageCDNSeconds * 1000;
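+ // The poller is expected to add the Age header to the Date header when
+ // computing the server time, so the measured skew should stay close to
+ // `skewSeconds` rather than including the CDN age.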
+
+ function serverResponse(request, response) {
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date(serverTime).toUTCString());
+ response.setHeader("Age", `${ageCDNSeconds}`);
+ response.write(JSON.stringify({ timestamp: 42, changes: [] }));
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, serverResponse);
+
+ await RemoteSettings.pollChanges();
+
+ const clockSkew = Services.prefs.getIntPref(PREF_CLOCK_SKEW_SECONDS);
+ Assert.ok(clockSkew >= skewSeconds, `clockSkew is ${clockSkew}`);
+});
+add_task(clear_state);
+
+add_task(async function test_backoff() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ function simulateBackoffResponse(request, response) {
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Backoff", "10");
+ response.write(JSON.stringify({ timestamp: 42, changes: [] }));
+ response.setStatusLine(null, 200, "OK");
+ }
+ server.registerPathHandler(CHANGES_PATH, simulateBackoffResponse);
+
+ // First will work.
+ await RemoteSettings.pollChanges();
+ // Second will fail because we haven't waited.
+ try {
+ await RemoteSettings.pollChanges();
+ // The previous line should have thrown an error.
+ Assert.ok(false);
+ } catch (e) {
+ Assert.ok(
+ /Server is asking clients to back off; retry in \d+s./.test(e.message)
+ );
+ }
+
+ // Once backoff time has expired, polling for changes can start again.
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(12000, [
+ {
+ id: "6a733d4a-601e-11e8-837a-0f85257529a1",
+ last_modified: 1300,
+ host: "localhost",
+ bucket: "some-bucket",
+ collection: "some-collection",
+ },
+ ])
+ );
+ Services.prefs.setCharPref(
+ PREF_SETTINGS_SERVER_BACKOFF,
+ `${Date.now() - 1000}`
+ );
+
+ await RemoteSettings.pollChanges();
+
+ // Backoff tracking preference was cleared.
+ Assert.ok(!Services.prefs.prefHasUserValue(PREF_SETTINGS_SERVER_BACKOFF));
+
+ // Ensure that we've accumulated the correct telemetry
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.SUCCESS]: 1,
+ [UptakeTelemetry.STATUS.UP_TO_DATE]: 1,
+ [UptakeTelemetry.STATUS.BACKOFF]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_network_error() {
+ const startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+
+ // Simulate a network error (to check telemetry report).
+ Services.prefs.setCharPref(PREF_SETTINGS_SERVER, "http://localhost:42/v1");
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {}
+
+ // ensure that we've accumulated the correct telemetry
+ const endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE_POLL
+ );
+ const expectedIncrements = {
+ [UptakeTelemetry.STATUS.NETWORK_ERROR]: 1,
+ };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+});
+add_task(clear_state);
+
+add_task(async function test_syncs_clients_with_local_database() {
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(42000, [
+ {
+ id: "d4a14f44-601f-11e8-8b8a-030f3dc5b844",
+ last_modified: 10000,
+ host: "localhost",
+ bucket: "main",
+ collection: "some-unknown",
+ },
+ {
+ id: "39f57e4e-6023-11e8-8b74-77c8dedfb389",
+ last_modified: 9000,
+ host: "localhost",
+ bucket: "blocklists",
+ collection: "addons",
+ },
+ {
+ id: "9a594c1a-601f-11e8-9c8a-33b2239d9113",
+ last_modified: 8000,
+ host: "localhost",
+ bucket: "main",
+ collection: "recipes",
+ },
+ ])
+ );
+
+ // This simulates what remote-settings would do when initializing a local database.
+ // We don't want to instantiate a client using the RemoteSettings() API
+ // since we want to test «unknown» clients that have a local database.
+ new RemoteSettingsClient("addons", {
+ bucketName: "blocklists",
+ }).db.importChanges({}, 42);
+ new RemoteSettingsClient("recipes").db.importChanges({}, 43);
+
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ Assert.ok(false, "pollChange() should throw when pulling recipes");
+ } catch (e) {
+ error = e;
+ }
+
+ // The `main/some-unknown` should be skipped because it has no local database.
+ // The `blocklists/addons` should be skipped because it is not the main bucket.
+ // The `recipes` client has a local database, and should cause a network error because
+ // the test does not set up the server to handle the requests made by `maybeSync()`.
+ Assert.ok(/HTTP 404/.test(error.message), "server will return 404 on sync");
+ Assert.equal(error.details.collection, "recipes");
+});
+add_task(clear_state);
+
+add_task(async function test_syncs_clients_with_local_dump() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ server.registerPathHandler(
+ CHANGES_PATH,
+ serveChangesEntries(42000, [
+ {
+ id: "d4a14f44-601f-11e8-8b8a-030f3dc5b844",
+ last_modified: 10000,
+ host: "localhost",
+ bucket: "main",
+ collection: "some-unknown",
+ },
+ {
+ id: "39f57e4e-6023-11e8-8b74-77c8dedfb389",
+ last_modified: 9000,
+ host: "localhost",
+ bucket: "blocklists",
+ collection: "addons",
+ },
+ {
+ id: "9a594c1a-601f-11e8-9c8a-33b2239d9113",
+ last_modified: 8000,
+ host: "localhost",
+ bucket: "main",
+ collection: "example",
+ },
+ ])
+ );
+
+ let error;
+ try {
+ await RemoteSettings.pollChanges();
+ } catch (e) {
+ error = e;
+ }
+
+ // The `main/some-unknown` should be skipped because it has no dump.
+ // The `blocklists/addons` should be skipped because it is not the main bucket.
+ // The `example` client has a dump, and should cause a network error because
+ // the test does not set up the server to handle the requests made by `maybeSync()`.
+ Assert.ok(/HTTP 404/.test(error.message), "server will return 404 on sync");
+ Assert.equal(error.details.collection, "example");
+});
+add_task(clear_state);
+
+add_task(async function test_adding_client_resets_polling() {
+ function serve200(request, response) {
+ const entries = [
+ {
+ id: "aa71e6cc-9f37-447a-b6e0-c025e8eabd03",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "a-collection",
+ },
+ ];
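+ // First poll (no `_since` param): return the entry above.
+ // Subsequent polls (with `_since`): return an empty changeset.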
+ if (request.queryString.includes("_since")) {
+ response.write(
+ JSON.stringify({
+ timestamp: 42,
+ changes: [],
+ })
+ );
+ } else {
+ response.write(
+ JSON.stringify({
+ timestamp: 42,
+ changes: entries,
+ })
+ );
+ }
+ response.setStatusLine(null, 200, "OK");
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date().toUTCString());
+ }
+ server.registerPathHandler(CHANGES_PATH, serve200);
+
+ // Poll once, without any client for "a-collection"
+ await RemoteSettings.pollChanges();
+
+ // Register a new client.
+ let maybeSyncCalled = false;
+ const c = RemoteSettings("a-collection");
+ c.maybeSync = () => {
+ maybeSyncCalled = true;
+ };
+
+ // Poll again.
+ await RemoteSettings.pollChanges();
+
+ // The new client was called, even if the server data didn't change.
+ Assert.ok(maybeSyncCalled);
+
+ // Poll again. This time maybeSync() won't be called.
+ maybeSyncCalled = false;
+ await RemoteSettings.pollChanges();
+ Assert.ok(!maybeSyncCalled);
+});
+add_task(clear_state);
+
+add_task(
+ async function test_broadcast_handler_passes_version_and_trigger_values() {
+ // The polling will use the broadcast version as cache busting query param.
+ let passedQueryString;
+ function serveCacheBusted(request, response) {
+ passedQueryString = request.queryString;
+ const entries = [
+ {
+ id: "b6ba7fab-a40a-4d03-a4af-6b627f3c5b36",
+ last_modified: 42,
+ host: "localhost",
+ bucket: "main",
+ collection: "from-broadcast",
+ },
+ ];
+ response.write(
+ JSON.stringify({
+ changes: entries,
+ timestamp: 42,
+ })
+ );
+ response.setHeader("ETag", '"42"');
+ response.setStatusLine(null, 200, "OK");
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date().toUTCString());
+ }
+ server.registerPathHandler(CHANGES_PATH, serveCacheBusted);
+
+ let passedTrigger;
+ const c = RemoteSettings("from-broadcast");
+ c.maybeSync = (last_modified, { trigger }) => {
+ passedTrigger = trigger;
+ };
+
+ const version = "1337";
+
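+ // The push service phases map to sync triggers: HELLO and REGISTER are
+ // treated as "startup", BROADCAST as "broadcast" (see assertions below).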
+ let context = { phase: pushBroadcastService.PHASES.HELLO };
+ await remoteSettingsBroadcastHandler.receivedBroadcastMessage(
+ version,
+ BROADCAST_ID,
+ context
+ );
+ Assert.equal(passedTrigger, "startup");
+ Assert.equal(passedQueryString, `_expected=${version}`);
+
+ clear_state();
+
+ context = { phase: pushBroadcastService.PHASES.REGISTER };
+ await remoteSettingsBroadcastHandler.receivedBroadcastMessage(
+ version,
+ BROADCAST_ID,
+ context
+ );
+ Assert.equal(passedTrigger, "startup");
+
+ clear_state();
+
+ context = { phase: pushBroadcastService.PHASES.BROADCAST };
+ await remoteSettingsBroadcastHandler.receivedBroadcastMessage(
+ version,
+ BROADCAST_ID,
+ context
+ );
+ Assert.equal(passedTrigger, "broadcast");
+ }
+);
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_remote_settings_recover_broken.js b/services/settings/test/unit/test_remote_settings_recover_broken.js
new file mode 100644
index 0000000000..09ad08a175
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_recover_broken.js
@@ -0,0 +1,151 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { SyncHistory } = ChromeUtils.import(
+ "resource://services-settings/SyncHistory.jsm"
+);
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const CHANGES_PATH = "/v1" + Utils.CHANGES_PATH;
+const BROKEN_SYNC_THRESHOLD = 10; // See default pref value
+
+let server;
+let client;
+let maybeSyncBackup;
+
+async function clear_state() {
+ // Disable logging output.
+ Services.prefs.setCharPref("services.settings.loglevel", "critical");
+ // Pull data from the test server.
+ Services.prefs.setCharPref(
+ PREF_SETTINGS_SERVER,
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ // Clear sync history.
+ await new SyncHistory("").clear();
+
+ // Simulate a response whose ETag gets incremented on each call
+ // (in order to generate several history entries, indexed by timestamp).
+ let timestamp = 1337;
+ server.registerPathHandler(CHANGES_PATH, (request, response) => {
+ response.setStatusLine(null, 200, "OK");
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setHeader("Date", new Date(1000000).toUTCString());
+ response.setHeader("ETag", `"${timestamp}"`);
+ response.write(
+ JSON.stringify({
+ timestamp,
+ changes: [
+ {
+ last_modified: ++timestamp,
+ bucket: "main",
+ collection: "desktop-manager",
+ },
+ ],
+ })
+ );
+ });
+
+ // Restore original maybeSync() method between each test.
+ client.maybeSync = maybeSyncBackup;
+}
+
+function run_test() {
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ client = RemoteSettings("desktop-manager");
+ maybeSyncBackup = client.maybeSync;
+
+ run_next_test();
+
+ registerCleanupFunction(() => {
+ server.stop(() => {});
+ // Restore original maybeSync() method when test suite is done.
+ client.maybeSync = maybeSyncBackup;
+ });
+}
+
+add_task(clear_state);
+
+add_task(async function test_db_is_destroyed_when_sync_is_broken() {
+ // Simulate a successful sync.
+ client.maybeSync = async () => {
+ // Store some data in local DB.
+ await client.db.importChanges({}, 1515, []);
+ };
+ await RemoteSettings.pollChanges({ trigger: "timer" });
+
+ // Register a client with a failing sync method.
+ client.maybeSync = () => {
+ throw new RemoteSettingsClient.InvalidSignatureError(
+ "main/desktop-manager"
+ );
+ };
+
+ // Now obtain several failures in a row.
+ for (var i = 0; i < BROKEN_SYNC_THRESHOLD; i++) {
+ try {
+ await RemoteSettings.pollChanges({ trigger: "timer" });
+ } catch (e) {}
+ }
+
+ // Synchronization is in broken state.
+ Assert.equal(
+ await client.db.getLastModified(),
+ 1515,
+ "Local DB was not destroyed yet"
+ );
+
+ // Synchronize again. Broken state will be detected.
+ try {
+ await RemoteSettings.pollChanges({ trigger: "timer" });
+ } catch (e) {}
+
+ // DB was destroyed.
+ Assert.equal(
+ await client.db.getLastModified(),
+ null,
+ "Local DB was destroyed"
+ );
+});
+
+add_task(clear_state);
+
+add_task(async function test_db_is_not_destroyed_when_state_is_server_error() {
+ // Since we don't mock the server endpoints to obtain the changeset of this
+ // collection, the call to `maybeSync()` will fail with network errors.
+
+ // Store some data in local DB.
+ await client.db.importChanges({}, 1515, []);
+
+ // Now obtain several failures in a row.
+ let lastError;
+ for (var i = 0; i < BROKEN_SYNC_THRESHOLD + 1; i++) {
+ try {
+ await RemoteSettings.pollChanges({ trigger: "timer" });
+ } catch (e) {
+ lastError = e;
+ }
+ }
+ Assert.ok(
+ /Cannot parse server content/.test(lastError.message),
+ "Error is about server"
+ );
+ // DB was not destroyed.
+ Assert.equal(
+ await client.db.getLastModified(),
+ 1515,
+ "Local DB was not destroyed"
+ );
+});
+
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_remote_settings_release_prefs.js b/services/settings/test/unit/test_remote_settings_release_prefs.js
new file mode 100644
index 0000000000..460c1bff88
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_release_prefs.js
@@ -0,0 +1,202 @@
+"use strict";
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+
+const UTILS_MODULE = "resource://services-settings/Utils.jsm";
+
+function clear_state() {
+ Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "0");
+ Services.prefs.clearUserPref("services.settings.server");
+ Services.prefs.clearUserPref("services.settings.preview_enabled");
+ Cu.unload(UTILS_MODULE);
+ Assert.ok(!Cu.isModuleLoaded(UTILS_MODULE), "Utils was unloaded before test");
+}
+
+add_setup(async function() {
+ // Set this env var in order to test the code path where the
+ // server URL can only be overridden from Dev Tools.
+ // See `isRunningTests` in `services/settings/Utils.jsm`.
+ const before = Services.env.get("MOZ_DISABLE_NONLOCAL_CONNECTIONS");
+ Services.env.set("MOZ_DISABLE_NONLOCAL_CONNECTIONS", "0");
+
+ registerCleanupFunction(() => {
+ clear_state();
+ Services.env.set("MOZ_DISABLE_NONLOCAL_CONNECTIONS", before);
+ });
+});
+
+add_task(clear_state);
+
+add_task(
+ {
+ skip_if: () => !AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_server_url_cannot_be_toggled_in_release() {
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.equal(
+ Utils.SERVER_URL,
+ AppConstants.REMOTE_SETTINGS_SERVER_URL,
+ "Server url pref was not read in release"
+ );
+ }
+);
+
+add_task(
+ {
+ skip_if: () => AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_server_url_cannot_be_toggled_in_dev_nightly() {
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.notEqual(
+ Utils.SERVER_URL,
+ AppConstants.REMOTE_SETTINGS_SERVER_URL,
+ "Server url pref was read in nightly/dev"
+ );
+ }
+);
+add_task(clear_state);
+
+add_task(
+ {
+ skip_if: () => !AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_preview_mode_cannot_be_toggled_in_release() {
+ Services.prefs.setBoolPref("services.settings.preview_enabled", true);
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.ok(!Utils.PREVIEW_MODE, "Preview mode pref was not read in release");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ {
+ skip_if: () => AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_preview_mode_cannot_be_toggled_in_dev_nightly() {
+ Services.prefs.setBoolPref("services.settings.preview_enabled", true);
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.ok(Utils.PREVIEW_MODE, "Preview mode pref is read in dev/nightly");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ {
+ skip_if: () => !AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_load_dumps_will_always_be_loaded_in_release() {
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.equal(
+ Utils.SERVER_URL,
+ AppConstants.REMOTE_SETTINGS_SERVER_URL,
+ "Server url pref was not read"
+ );
+ Assert.ok(Utils.LOAD_DUMPS, "Dumps will always be loaded");
+ }
+);
+
+add_task(
+ {
+ skip_if: () => AppConstants.RELEASE_OR_BETA,
+ },
+ async function test_load_dumps_can_be_disabled_in_dev_nightly() {
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.notEqual(
+ Utils.SERVER_URL,
+ AppConstants.REMOTE_SETTINGS_SERVER_URL,
+ "Server url pref was read"
+ );
+ Assert.ok(!Utils.LOAD_DUMPS, "Dumps are not loaded if server is not prod");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_server_url_can_be_changed_in_all_versions_if_running_for_devtools() {
+ Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "1");
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.notEqual(
+ Utils.SERVER_URL,
+ AppConstants.REMOTE_SETTINGS_SERVER_URL,
+ "Server url pref was read"
+ );
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_preview_mode_can_be_changed_in_all_versions_if_running_for_devtools() {
+ Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "1");
+ Services.prefs.setBoolPref("services.settings.preview_enabled", true);
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.ok(Utils.PREVIEW_MODE, "Preview mode pref was read");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_dumps_are_not_loaded_if_server_is_not_prod_if_running_for_devtools() {
+ Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "1");
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ "http://localhost:8888/v1"
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+ Assert.ok(!Utils.LOAD_DUMPS, "Dumps won't be loaded");
+ }
+);
+add_task(clear_state);
+
+add_task(
+ async function test_dumps_are_loaded_if_server_is_prod_if_running_for_devtools() {
+ Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "1");
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ AppConstants.REMOTE_SETTINGS_SERVER_URL
+ );
+
+ const { Utils } = ChromeUtils.import(UTILS_MODULE);
+
+ Assert.ok(Utils.LOAD_DUMPS, "dumps are loaded if prod");
+ }
+);
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_remote_settings_signatures.js b/services/settings/test/unit/test_remote_settings_signatures.js
new file mode 100644
index 0000000000..d16a0a1370
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_signatures.js
@@ -0,0 +1,836 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+"use strict";
+
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+const { UptakeTelemetry, Policy } = ChromeUtils.import(
+ "resource://services-common/uptake-telemetry.js"
+);
+const { TelemetryTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TelemetryTestUtils.sys.mjs"
+);
+
+const PREF_SETTINGS_SERVER = "services.settings.server";
+const SIGNER_NAME = "onecrl.content-signature.mozilla.org";
+const TELEMETRY_COMPONENT = "remotesettings";
+
+const CERT_DIR = "test_remote_settings_signatures/";
+const CHAIN_FILES = ["collection_signing_ee.pem", "collection_signing_int.pem"];
+
+function getFileData(file) {
+ const stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ stream.init(file, -1, 0, 0);
+ const data = NetUtil.readInputStreamToString(stream, stream.available());
+ stream.close();
+ return data;
+}
+
+function getCertChain() {
+ const chain = [];
+ for (let file of CHAIN_FILES) {
+ chain.push(getFileData(do_get_file(CERT_DIR + file)));
+ }
+ return chain.join("\n");
+}
+
+let server;
+let client;
+
+function run_test() {
+ // Signature verification is enabled by default. We use a custom signer
+ // because these tests were originally written for OneCRL.
+ client = RemoteSettings("signed", { signerName: SIGNER_NAME });
+
+ Services.prefs.setCharPref("services.settings.loglevel", "debug");
+
+ // Set up an HTTP Server
+ server = new HttpServer();
+ server.start(-1);
+
+ // Pretend we are in nightly channel to make sure all telemetry events are sent.
+ let oldGetChannel = Policy.getChannel;
+ Policy.getChannel = () => "nightly";
+
+ run_next_test();
+
+ registerCleanupFunction(() => {
+ Policy.getChannel = oldGetChannel;
+ server.stop(() => {});
+ });
+}
+
+add_task(async function test_check_signatures() {
+ // First, perform a signature verification with known data and signature
+ // to ensure things are working correctly
+ let verifier = Cc[
+ "@mozilla.org/security/contentsignatureverifier;1"
+ ].createInstance(Ci.nsIContentSignatureVerifier);
+
+ const emptyData = "[]";
+ const emptySignature =
+ "p384ecdsa=zbugm2FDitsHwk5-IWsas1PpWwY29f0Fg5ZHeqD8fzep7AVl2vfcaHA7LdmCZ28qZLOioGKvco3qT117Q4-HlqFTJM7COHzxGyU2MMJ0ZTnhJrPOC1fP3cVQjU1PTWi9";
+
+ ok(
+ await verifier.asyncVerifyContentSignature(
+ emptyData,
+ emptySignature,
+ getCertChain(),
+ SIGNER_NAME,
+ Ci.nsIX509CertDB.AppXPCShellRoot
+ )
+ );
+
+ const collectionData =
+ '[{"details":{"bug":"https://bugzilla.mozilla.org/show_bug.cgi?id=1155145","created":"2016-01-18T14:43:37Z","name":"GlobalSign certs","who":".","why":"."},"enabled":true,"id":"97fbf7c4-3ef2-f54f-0029-1ba6540c63ea","issuerName":"MHExKDAmBgNVBAMTH0dsb2JhbFNpZ24gUm9vdFNpZ24gUGFydG5lcnMgQ0ExHTAbBgNVBAsTFFJvb3RTaWduIFBhcnRuZXJzIENBMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMQswCQYDVQQGEwJCRQ==","last_modified":2000,"serialNumber":"BAAAAAABA/A35EU="},{"details":{"bug":"https://bugzilla.mozilla.org/show_bug.cgi?id=1155145","created":"2016-01-18T14:48:11Z","name":"GlobalSign certs","who":".","why":"."},"enabled":true,"id":"e3bd531e-1ee4-7407-27ce-6fdc9cecbbdc","issuerName":"MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB","last_modified":3000,"serialNumber":"BAAAAAABI54PryQ="}]';
+ const collectionSignature =
+ "p384ecdsa=f4pA2tYM5jQgWY6YUmhUwQiBLj6QO5sHLD_5MqLePz95qv-7cNCuQoZnPQwxoptDtW8hcWH3kLb0quR7SB-r82gkpR9POVofsnWJRA-ETb0BcIz6VvI3pDT49ZLlNg3p";
+
+ ok(
+ await verifier.asyncVerifyContentSignature(
+ collectionData,
+ collectionSignature,
+ getCertChain(),
+ SIGNER_NAME,
+ Ci.nsIX509CertDB.AppXPCShellRoot
+ )
+ );
+});
+
+add_task(async function test_check_synchronization_with_signatures() {
+ const port = server.identity.primaryPort;
+
+ const x5u = `http://localhost:${port}/test_remote_settings_signatures/test_cert_chain.pem`;
+
+ // Telemetry reports.
+ const TELEMETRY_SOURCE = client.identifier;
+
+ function registerHandlers(responses) {
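+ // `responses` maps "METHOD:/path?querystring" keys to lists of canned
+ // responses, which are served in order (one per matching request).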
+ function handleResponse(serverTimeMillis, request, response) {
+ const key = `${request.method}:${request.path}?${request.queryString}`;
+ const available = responses[key];
+ const sampled = available.length > 1 ? available.shift() : available[0];
+ if (!sampled) {
+ do_throw(
+ `unexpected ${request.method} request for ${request.path}?${request.queryString}`
+ );
+ }
+
+ response.setStatusLine(
+ null,
+ sampled.status.status,
+ sampled.status.statusText
+ );
+ // send the headers
+ for (let headerLine of sampled.sampleHeaders) {
+ let headerElements = headerLine.split(":");
+ response.setHeader(headerElements[0], headerElements[1].trimLeft());
+ }
+
+ // set the server date
+ response.setHeader("Date", new Date(serverTimeMillis).toUTCString());
+
+ response.write(sampled.responseBody);
+ }
+
+ for (let key of Object.keys(responses)) {
+ const keyParts = key.split(":");
+ const valueParts = keyParts[1].split("?");
+ const path = valueParts[0];
+
+ server.registerPathHandler(path, handleResponse.bind(null, 2000));
+ }
+ }
+
+ // set up prefs so the kinto updater talks to the test server
+ Services.prefs.setCharPref(
+ PREF_SETTINGS_SERVER,
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ // These are records we'll use in the test collections
+ const RECORD1 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:43:37Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: ".",
+ },
+ enabled: true,
+ id: "97fbf7c4-3ef2-f54f-0029-1ba6540c63ea",
+ issuerName:
+ "MHExKDAmBgNVBAMTH0dsb2JhbFNpZ24gUm9vdFNpZ24gUGFydG5lcnMgQ0ExHTAbBgNVBAsTFFJvb3RTaWduIFBhcnRuZXJzIENBMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMQswCQYDVQQGEwJCRQ==",
+ last_modified: 2000,
+ serialNumber: "BAAAAAABA/A35EU=",
+ };
+
+ const RECORD2 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:48:11Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: ".",
+ },
+ enabled: true,
+ id: "e3bd531e-1ee4-7407-27ce-6fdc9cecbbdc",
+ issuerName:
+ "MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB",
+ last_modified: 3000,
+ serialNumber: "BAAAAAABI54PryQ=",
+ };
+
+ const RECORD3 = {
+ details: {
+ bug: "https://bugzilla.mozilla.org/show_bug.cgi?id=1155145",
+ created: "2016-01-18T14:48:11Z",
+ name: "GlobalSign certs",
+ who: ".",
+ why: ".",
+ },
+ enabled: true,
+ id: "c7c49b69-a4ab-418e-92a9-e1961459aa7f",
+ issuerName:
+ "MIGBMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBudi1zYTElMCMGA1UECxMcUHJpbWFyeSBPYmplY3QgUHVibGlzaGluZyBDQTEwMC4GA1UEAxMnR2xvYmFsU2lnbiBQcmltYXJ5IE9iamVjdCBQdWJsaXNoaW5nIENB",
+ last_modified: 4000,
+ serialNumber: "BAAAAAABI54PryQ=",
+ };
+
+ const RECORD1_DELETION = {
+ deleted: true,
+ enabled: true,
+ id: "97fbf7c4-3ef2-f54f-0029-1ba6540c63ea",
+ last_modified: 3500,
+ };
+
+ // Check that a signature on an empty collection is OK
+ // We need to set up paths on the HTTP server to return specific data from
+ // specific paths for each test. Here we prepare data for each response.
+
+ // A cert chain response (this is the cert chain that contains the signing
+ // cert, the root and any intermediates in between). This is used in each
+ // sync.
+ const RESPONSE_CERT_CHAIN = {
+ comment: "RESPONSE_CERT_CHAIN",
+ sampleHeaders: ["Content-Type: text/plain; charset=UTF-8"],
+ status: { status: 200, statusText: "OK" },
+ responseBody: getCertChain(),
+ };
+
+ // A server settings response. This is used in each sync.
+ const RESPONSE_SERVER_SETTINGS = {
+ comment: "RESPONSE_SERVER_SETTINGS",
+ sampleHeaders: [
+ "Access-Control-Allow-Origin: *",
+ "Access-Control-Expose-Headers: Retry-After, Content-Length, Alert, Backoff",
+ "Content-Type: application/json; charset=UTF-8",
+ "Server: waitress",
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ settings: {
+ batch_max_requests: 25,
+ },
+ url: `http://localhost:${port}/v1/`,
+ documentation: "https://kinto.readthedocs.org/",
+ version: "1.5.1",
+ commit: "cbc6f58",
+ hello: "kinto",
+ }),
+ };
+
+ // This is the initial, empty state of the collection. This is only used
+ // for the first sync.
+ const RESPONSE_EMPTY_INITIAL = {
+ comment: "RESPONSE_EMPTY_INITIAL",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "1000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 1000,
+ metadata: {
+ signature: {
+ x5u,
+ signature:
+ "vxuAg5rDCB-1pul4a91vqSBQRXJG_j7WOYUTswxRSMltdYmbhLRH8R8brQ9YKuNDF56F-w6pn4HWxb076qgKPwgcEBtUeZAO_RtaHXRkRUUgVzAr86yQL4-aJTbv3D6u",
+ },
+ },
+ changes: [],
+ }),
+ };
+
+ // Here, we map request method and path to the available responses
+ const emptyCollectionResponses = {
+ "GET:/test_remote_settings_signatures/test_cert_chain.pem?": [
+ RESPONSE_CERT_CHAIN,
+ ],
+ "GET:/v1/?": [RESPONSE_SERVER_SETTINGS],
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=1000": [
+ RESPONSE_EMPTY_INITIAL,
+ ],
+ };
+
+ //
+ // 1.
+ // - collection: undefined -> []
+ // - timestamp: undefined -> 1000
+ //
+
+ // .. and use this map to register handlers for each path
+ registerHandlers(emptyCollectionResponses);
+
+ let startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+
+ // With all of this set up, we attempt a sync. This will resolve if all is
+ // well and throw if something goes wrong.
+ await client.maybeSync(1000);
+
+ equal((await client.get()).length, 0);
+
+ let endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+
+ // Ensure that a success histogram is tracked when a successful sync occurs.
+ let expectedIncrements = { [UptakeTelemetry.STATUS.SUCCESS]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+
+ //
+ // 2.
+ // - collection: [] -> [RECORD2, RECORD1]
+ // - timestamp: 1000 -> 3000
+ //
+ // Check that some additions (2 records) to the collection have a valid
+ // signature.
+
+ // This response adds two entries (RECORD1 and RECORD2) to the collection
+ const RESPONSE_TWO_ADDED = {
+ comment: "RESPONSE_TWO_ADDED",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "3000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 3000,
+ metadata: {
+ signature: {
+ x5u,
+ signature:
+ "dwhJeypadNIyzGj3QdI0KMRTPnHhFPF_j73mNrsPAHKMW46S2Ftf4BzsPMvPMB8h0TjDus13wo_R4l432DHe7tYyMIWXY0PBeMcoe5BREhFIxMxTsh9eGVXBD1e3UwRy",
+ },
+ },
+ changes: [RECORD2, RECORD1],
+ }),
+ };
+
+ const twoItemsResponses = {
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=3000&_since=%221000%22": [
+ RESPONSE_TWO_ADDED,
+ ],
+ };
+ registerHandlers(twoItemsResponses);
+ await client.maybeSync(3000);
+
+ equal((await client.get()).length, 2);
+
+ //
+ // 3.
+ // - collection: [RECORD2, RECORD1] -> [RECORD2, RECORD3]
+ // - timestamp: 3000 -> 4000
+ //
+ // Check the collection with one addition and one removal has a valid
+ // signature
+ const THREE_ITEMS_SIG =
+ "MIEmNghKnkz12UodAAIc3q_Y4a3IJJ7GhHF4JYNYmm8avAGyPM9fYU7NzVo94pzjotG7vmtiYuHyIX2rTHTbT587w0LdRWxipgFd_PC1mHiwUyjFYNqBBG-kifYk7kEw";
+
+ // Remove RECORD1, add RECORD3
+ const RESPONSE_ONE_ADDED_ONE_REMOVED = {
+ comment: "RESPONSE_ONE_ADDED_ONE_REMOVED ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "4000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 4000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: THREE_ITEMS_SIG,
+ },
+ },
+ changes: [RECORD3, RECORD1_DELETION],
+ }),
+ };
+
+ const oneAddedOneRemovedResponses = {
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=4000&_since=%223000%22": [
+ RESPONSE_ONE_ADDED_ONE_REMOVED,
+ ],
+ };
+ registerHandlers(oneAddedOneRemovedResponses);
+ await client.maybeSync(4000);
+
+ equal((await client.get()).length, 2);
+
+ //
+ // 4.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3]
+ // - timestamp: 4000 -> 4100
+ //
+ // Check the signature is still valid with no operation (no changes)
+
+ // Leave the collection unchanged
+ const RESPONSE_EMPTY_NO_UPDATE = {
+ comment: "RESPONSE_EMPTY_NO_UPDATE ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "4000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 4000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: THREE_ITEMS_SIG,
+ },
+ },
+ changes: [],
+ }),
+ };
+
+ const noOpResponses = {
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=4100&_since=%224000%22": [
+ RESPONSE_EMPTY_NO_UPDATE,
+ ],
+ };
+ registerHandlers(noOpResponses);
+ await client.maybeSync(4100);
+
+ equal((await client.get()).length, 2);
+
+ console.info("---------------------------------------------------------");
+ //
+ // 5.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3]
+ // - timestamp: 4000 -> 5000
+ //
+ // Check the collection is reset when the signature is invalid.
+ // Client will:
+ // - Fetch metadata (with bad signature)
+ // - Perform the sync (fetch empty changes)
+ // - Refetch the metadata and the whole collection
+ // - Validate signature successfully, but with no changes to emit.
+
+ const RESPONSE_COMPLETE_INITIAL = {
+ comment: "RESPONSE_COMPLETE_INITIAL ",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "4000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 4000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: THREE_ITEMS_SIG,
+ },
+ },
+ changes: [RECORD2, RECORD3],
+ }),
+ };
+
+ const RESPONSE_EMPTY_NO_UPDATE_BAD_SIG = {
+ ...RESPONSE_EMPTY_NO_UPDATE,
+ responseBody: JSON.stringify({
+ timestamp: 4000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: "aW52YWxpZCBzaWduYXR1cmUK",
+ },
+ },
+ changes: [],
+ }),
+ };
+
+ const badSigGoodSigResponses = {
+ // The first collection state is the three item collection (since
+ // there was a sync with no updates before) - but, since the signature is wrong,
+ // another request will be made...
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=5000&_since=%224000%22": [
+ RESPONSE_EMPTY_NO_UPDATE_BAD_SIG,
+ ],
+ // Subsequent signature returned is a valid one for the three item
+ // collection.
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=5000": [
+ RESPONSE_COMPLETE_INITIAL,
+ ],
+ };
+
+ registerHandlers(badSigGoodSigResponses);
+
+ startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+
+ let syncEventSent = false;
+ client.on("sync", ({ data }) => {
+ syncEventSent = true;
+ });
+
+ await client.maybeSync(5000);
+
+ equal((await client.get()).length, 2);
+
+ endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+
+ // since we only fixed the signature, and no data was changed, the sync event
+ // was not sent.
+ equal(syncEventSent, false);
+
+ // Ensure that the failure count is incremented for a successful sync with an
+ // (initial) bad signature - only SERVICES_SETTINGS_SYNC_SIG_FAIL should
+ // increment.
+ expectedIncrements = { [UptakeTelemetry.STATUS.SIGNATURE_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+
+ //
+ // 6.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3]
+ // - timestamp: 4000 -> 5000
+ //
+ // Check the collection is reset when the signature is invalid.
+ // Client will:
+ // - Fetch metadata (with bad signature)
+ // - Perform the sync (fetch empty changes)
+ // - Refetch the whole collection and metadata
+ // - Sync will be no-op since local is equal to server, no changes to emit.
+
+ const badSigGoodOldResponses = {
+ // The first collection state is the current state (since there's no update
+ // - but, since the signature is wrong, another request will be made)
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=5000&_since=%224000%22": [
+ RESPONSE_EMPTY_NO_UPDATE_BAD_SIG,
+ ],
+ // The next request is for the full collection. This will be
+ // checked against the valid signature and last_modified times will be
+ // compared. Sync should be a no-op, even though the signature is good,
+ // because the local collection is newer.
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=5000": [
+ RESPONSE_EMPTY_INITIAL,
+ ],
+ };
+
+ // ensure our collection hasn't been replaced with an older, empty one
+ equal((await client.get()).length, 2, "collection was restored");
+
+ registerHandlers(badSigGoodOldResponses);
+
+ syncEventSent = false;
+ client.on("sync", ({ data }) => {
+ syncEventSent = true;
+ });
+
+ await client.maybeSync(5000);
+
+ // Local data was unchanged, since it was newer than the data returned by the server,
+ // thus the sync event is not sent.
+ equal(syncEventSent, false, "event was not sent");
+
+ //
+ // 7.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3]
+ // - timestamp: 4000 -> 5000
+ //
+ // Check that a tampered local DB will be overwritten and
+ // the sync event contains the appropriate data.
+
+ const RESPONSE_COMPLETE_BAD_SIG = {
+ ...RESPONSE_EMPTY_NO_UPDATE,
+ responseBody: JSON.stringify({
+ timestamp: 5000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: "aW52YWxpZCBzaWduYXR1cmUK",
+ },
+ },
+ changes: [RECORD2, RECORD3],
+ }),
+ };
+
+ const badLocalContentGoodSigResponses = {
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=5000": [
+ RESPONSE_COMPLETE_BAD_SIG,
+ RESPONSE_COMPLETE_INITIAL,
+ ],
+ };
+
+ registerHandlers(badLocalContentGoodSigResponses);
+
+ // We create a local state manually here, in order to test that the sync event
+ // data properly contains created, updated, and deleted records.
+ // The local DB contains the same id as RECORD2 and a fake record.
+ // The final server collection contains RECORD2 and RECORD3.
+ const localId = "0602b1b2-12ab-4d3a-b6fb-593244e7b035";
+ await client.db.importChanges(
+ { signature: { x5u, signature: "abc" } },
+ null,
+ [
+ { ...RECORD2, last_modified: 1234567890, serialNumber: "abc" },
+ { id: localId },
+ ],
+ {
+ clear: true,
+ }
+ );
+
+ let syncData = null;
+ client.on("sync", ({ data }) => {
+ syncData = data;
+ });
+
+ // Clear events snapshot.
+ TelemetryTestUtils.assertEvents([], {}, { process: "dummy" });
+
+ const TELEMETRY_EVENTS_FILTERS = {
+ category: "uptake.remotecontent.result",
+ method: "uptake",
+ };
+
+ // Events telemetry is sampled on release; we pretend to be on nightly (see run_test).
+ await client.maybeSync(5000);
+
+ // We should report a corruption_error.
+ TelemetryTestUtils.assertEvents(
+ [
+ [
+ "uptake.remotecontent.result",
+ "uptake",
+ "remotesettings",
+ UptakeTelemetry.STATUS.CORRUPTION_ERROR,
+ {
+ source: client.identifier,
+ duration: v => v > 0,
+ trigger: "manual",
+ },
+ ],
+ ],
+ TELEMETRY_EVENTS_FILTERS
+ );
+
+ // The local data was corrupted, and the Telemetry status reflects it.
+ // But the sync overwrote the bad data and was eventually a success.
+ // Since local data was replaced, we use record IDs to determine
+ // what was created and deleted. And bad local data will appear
+ // in the sync event as deleted.
+ equal(syncData.current.length, 2);
+ equal(syncData.created.length, 1);
+ equal(syncData.created[0].id, RECORD3.id);
+ equal(syncData.updated.length, 1);
+ equal(syncData.updated[0].old.serialNumber, "abc");
+ equal(syncData.updated[0].new.serialNumber, RECORD2.serialNumber);
+ equal(syncData.deleted.length, 1);
+ equal(syncData.deleted[0].id, localId);
+
+ //
+ // 8.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3] (unchanged because of error)
+ // - timestamp: 4000 -> 6000
+ //
+ // Check that a failing signature throws after retry, and that sync changes
+ // are not applied.
+
+ const RESPONSE_ONLY_RECORD4_BAD_SIG = {
+ comment: "Create RECORD4",
+ sampleHeaders: [
+ "Content-Type: application/json; charset=UTF-8",
+ 'ETag: "6000"',
+ ],
+ status: { status: 200, statusText: "OK" },
+ responseBody: JSON.stringify({
+ timestamp: 6000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: "aaaaaaaaaaaaaaaaaaaaaaaa", // sig verifier wants proper length or will crash.
+ },
+ },
+ changes: [
+ {
+ id: "f765df30-b2f1-42f6-9803-7bd5a07b5098",
+ last_modified: 6000,
+ },
+ ],
+ }),
+ };
+ const RESPONSE_EMPTY_NO_UPDATE_BAD_SIG_6000 = {
+ ...RESPONSE_EMPTY_NO_UPDATE,
+ responseBody: JSON.stringify({
+ timestamp: 6000,
+ metadata: {
+ signature: {
+ x5u,
+ signature: "aW52YWxpZCBzaWduYXR1cmUK",
+ },
+ },
+ changes: [],
+ }),
+ };
+ const allBadSigResponses = {
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=6000&_since=%224000%22": [
+ RESPONSE_EMPTY_NO_UPDATE_BAD_SIG_6000,
+ ],
+ "GET:/v1/buckets/main/collections/signed/changeset?_expected=6000": [
+ RESPONSE_ONLY_RECORD4_BAD_SIG,
+ ],
+ };
+
+ startSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+ registerHandlers(allBadSigResponses);
+ await Assert.rejects(
+ client.maybeSync(6000),
+ RemoteSettingsClient.InvalidSignatureError,
+ "Sync failed as expected (bad signature after retry)"
+ );
+
+ // Ensure that the failure is reflected in the accumulated telemetry:
+ endSnapshot = getUptakeTelemetrySnapshot(
+ TELEMETRY_COMPONENT,
+ TELEMETRY_SOURCE
+ );
+ expectedIncrements = { [UptakeTelemetry.STATUS.SIGNATURE_RETRY_ERROR]: 1 };
+ checkUptakeTelemetry(startSnapshot, endSnapshot, expectedIncrements);
+
+ // When signature fails after retry, the local data present before sync
+ // should be maintained (if its signature is valid).
+ ok(
+ arrayEqual(
+ (await client.get()).map(r => r.id),
+ [RECORD3.id, RECORD2.id]
+ ),
+ "Local records were not changed"
+ );
+ // And local data should still be valid.
+ await client.get({ verifySignature: true }); // Not raising.
+
+ //
+ // 9.
+ // - collection: [RECORD2, RECORD3] -> [] (cleared)
+ // - timestamp: 4000 -> 6000
+ //
+ // Check that local data is cleared during sync if signature is not valid.
+
+ await client.db.create({
+ id: "c6b19c67-2e0e-4a82-b7f7-1777b05f3e81",
+ last_modified: 42,
+ tampered: true,
+ });
+
+ await Assert.rejects(
+ client.maybeSync(6000),
+ RemoteSettingsClient.InvalidSignatureError,
+ "Sync failed as expected (bad signature after retry)"
+ );
+
+ // Since the local data was tampered with, it was cleared.
+ equal((await client.get()).length, 0, "Local database is now empty.");
+
+ //
+ // 10.
+ // - collection: [] -> [] (tampered record cleared again)
+ // - timestamp: 4000 -> 6000
+ //
+ // Check again that local data is cleared during sync if the signature is not valid.
+
+ await client.db.create({
+ id: "c6b19c67-2e0e-4a82-b7f7-1777b05f3e81",
+ last_modified: 42,
+ tampered: true,
+ });
+
+ await Assert.rejects(
+ client.maybeSync(6000),
+ RemoteSettingsClient.InvalidSignatureError,
+ "Sync failed as expected (bad signature after retry)"
+ );
+ // Since the local data was tampered with, it was cleared.
+ equal((await client.get()).length, 0, "Local database is now empty.");
+
+ //
+ // 11.
+ // - collection: [RECORD2, RECORD3] -> [RECORD2, RECORD3]
+ // - timestamp: 4000 -> 6000
+ //
+ // Check that local data is restored if signature was valid before sync.
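+ // Mock the signature verifier: record each serialized payload (in
+ // `sigCalls`) and return a scripted sequence of results (see array below).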
+ const sigCalls = [];
+ let i = 0;
+ client._verifier = {
+ async asyncVerifyContentSignature(serialized, signature) {
+ sigCalls.push(serialized);
+ console.log(`verify call ${i}`);
+ return [
+ false, // After importing changes.
+ true, // When checking previous local data.
+ false, // Still fail after retry.
+ true, // When checking previous local data again.
+ ][i++];
+ },
+ };
+ // Create an extra record. It will have a valid signature locally
+ // thanks to the verifier mock.
+ await client.db.importChanges(
+ {
+ signature: { x5u, signature: "aa" },
+ },
+ 4000,
+ [
+ {
+ id: "extraId",
+ last_modified: 42,
+ },
+ ]
+ );
+
+ equal((await client.get()).length, 1);
+
+ // Now sync, but importing changes will have a failing signature,
+ // and so will retry (see the mocked verifier results above).
+ await Assert.rejects(
+ client.maybeSync(6000),
+ RemoteSettingsClient.InvalidSignatureError,
+ "Sync failed as expected (bad signature after retry)"
+ );
+ equal(i, 4, "sync has retried as expected");
+
+ // Make sure that we retried on a blank DB. The extra record should
+ // have been deleted when we validated the signature the second time.
+ // Since the local data was tampered with, it was cleared.
+ ok(/extraId/.test(sigCalls[0]), "extra record when importing changes");
+ ok(/extraId/.test(sigCalls[1]), "extra record when checking local");
+ ok(!/extraId/.test(sigCalls[2]), "db was flushed before retry");
+ ok(/extraId/.test(sigCalls[3]), "when checking local after retry");
+});
diff --git a/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem
new file mode 100644
index 0000000000..4a31a53abc
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem
@@ -0,0 +1,16 @@
+-----BEGIN CERTIFICATE-----
+MIICdTCCAV2gAwIBAgIUUUM5QQqbDKU02LlGaCuge99SOokwDQYJKoZIhvcNAQEL
+BQAwIzEhMB8GA1UEAwwYY29sbGVjdGlvbi1zaWduZXItaW50LUNBMCIYDzIwMjEx
+MTI3MDAwMDAwWhgPMjAyNDAyMDUwMDAwMDBaMCYxJDAiBgNVBAMMG2NvbGxlY3Rp
+b24tc2lnbmVyLWVlLWludC1DQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABKFockM2
+K1x7GInzeRVGFaHHP7SN7oY+AikV22COJS3ktxMtqM6Y6DFTTmqcDAsJyNY5regy
+BuW6gTRzoR+jMOBdqMluQ4P+J4c9qXEDviiIz/AC8Fr3Gh/dzIN0qm6pzqNIMEYw
+EwYDVR0lBAwwCgYIKwYBBQUHAwMwLwYDVR0RBCgwJoIkb25lY3JsLmNvbnRlbnQt
+c2lnbmF0dXJlLm1vemlsbGEub3JnMA0GCSqGSIb3DQEBCwUAA4IBAQAUwZjGceKB
+w4KxrHHi51ZPCd+VivUZcgeMjXgljmKOGFvqiMnVZGhDbcuCo5idnjEOK33WdHBb
+oxKnVA/IsXAXJFkOEYIV2bKCZYaCtVPus/b2bu40k1xtKp6lj1q8xALCpBwVPWam
+rBOQf039il/mSgXIO3kQE4ueT9KbasMYyI3fdCG8K1PU15xdf9NKRoQz3T+j8+8a
+p/WMDNUBErRNGRCTmIsfj4XIMiiXNn+7chitcL2Kvg/v9WnmyIekXNApM9CC9bpY
+RYMXIcHs01T8JL6Lwkw00l1KVCDh/QH30k986yP/oG/zc7Na6bTR1emoGrkFURa5
+DRVif8+I88Kd
+-----END CERTIFICATE-----
diff --git a/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem.certspec b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem.certspec
new file mode 100644
index 0000000000..866c357c50
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_ee.pem.certspec
@@ -0,0 +1,5 @@
+issuer:collection-signer-int-CA
+subject:collection-signer-ee-int-CA
+subjectKey:secp384r1
+extension:extKeyUsage:codeSigning
+extension:subjectAlternativeName:onecrl.content-signature.mozilla.org
diff --git a/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem
new file mode 100644
index 0000000000..db96075e80
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem
@@ -0,0 +1,19 @@
+-----BEGIN CERTIFICATE-----
+MIIDAzCCAeugAwIBAgIUZ0EUMclrdAM3H/nCxRm3Fa2xBNcwDQYJKoZIhvcNAQEL
+BQAwKTEnMCUGA1UEAwweeHBjc2hlbGwgc2lnbmVkIGFwcHMgdGVzdCByb290MCIY
+DzIwMjExMTI3MDAwMDAwWhgPMjAyNDAyMDUwMDAwMDBaMCMxITAfBgNVBAMMGGNv
+bGxlY3Rpb24tc2lnbmVyLWludC1DQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC
+AQoCggEBALqIUahEjhbWQf1utogGNhA9PBPZ6uQ1SrTs9WhXbCR7wcclqODYH72x
+nAabbhqG8mvir1p1a2pkcQh6pVqnRYf3HNUknAJ+zUP8HmnQOCApk6sgw0nk27lM
+wmtsDu0Vgg/xfq1pGrHTAjqLKkHup3DgDw2N/WYLK7AkkqR9uYhheZCxV5A90jvF
+4LhIH6g304hD7ycW2FW3ZlqqfgKQLzp7EIAGJMwcbJetlmFbt+KWEsB1MaMMkd20
+yvf8rR0l0wnvuRcOp2jhs3svIm9p47SKlWEd7ibWJZ2rkQhONsscJAQsvxaLL+Xx
+j5kXMbiz/kkj+nJRxDHVA6zaGAo17Y0CAwEAAaMlMCMwDAYDVR0TBAUwAwEB/zAT
+BgNVHSUEDDAKBggrBgEFBQcDAzANBgkqhkiG9w0BAQsFAAOCAQEAZzxJzD1HA7C3
+VnFxOop9ZATDuALX8MXjf0obe3rvnOldqIAyOjT6NUmrJGul64u95FloVojIp8N6
+cYvviRZOjaV/J4hjN+p7193vdxaxDKtfUFHUnB3Wjqi3UJM/xfj7qxPgxQoy/Rvg
+mBmmqBUDM2N49sp5ZiXyJfJzA4jzQsJoFxBzgJEl5t+luWTFsBtGmu2KRTxSeOdX
+AJJwWawYr6oB8tSk5xweGVOZx/U27Z4e30JbWDzBRmMDX2aBGDcch+i48+tYbRyp
+KGjKPK69iJg0t4Q27jsFkBnL+ayW08YtixhA7lqix87PkVHjW+ITOwR/45QRQ3AE
+h4Bxt3g47Q==
+-----END CERTIFICATE-----
diff --git a/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem.certspec b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem.certspec
new file mode 100644
index 0000000000..2b3eb2d78f
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_signatures/collection_signing_int.pem.certspec
@@ -0,0 +1,4 @@
+issuer:xpcshell signed apps test root
+subject:collection-signer-int-CA
+extension:basicConstraints:cA,
+extension:extKeyUsage:codeSigning
diff --git a/services/settings/test/unit/test_remote_settings_sync_history.js b/services/settings/test/unit/test_remote_settings_sync_history.js
new file mode 100644
index 0000000000..9a14db1fcf
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_sync_history.js
@@ -0,0 +1,69 @@
+"use strict";
+
+const { SyncHistory } = ChromeUtils.import(
+ "resource://services-settings/SyncHistory.jsm"
+);
+
+async function clear_state() {
+ await new SyncHistory("").clear();
+}
+add_task(clear_state);
+
+add_task(async function test_entries_are_stored_by_source() {
+ const history = new SyncHistory();
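+ // Timestamps may be passed as strings; list() returns them as numbers
+ // (see the assertion below).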
+ await history.store("42", "success", { pi: "3.14" });
+ // Check that history is isolated by source.
+ await new SyncHistory("main/cfr").store("88", "error");
+
+ const l = await history.list();
+
+ Assert.deepEqual(l, [
+ {
+ timestamp: 42,
+ status: "success",
+ infos: { pi: "3.14" },
+ datetime: new Date(42),
+ },
+ ]);
+});
+add_task(clear_state);
+
+add_task(
+ async function test_old_entries_are_removed_keep_fixed_size_per_source() {
+ const history = new SyncHistory("settings-sync", { size: 3 });
+ const anotherHistory = new SyncHistory("main/cfr");
+
+ await history.store("42", "success");
+ await history.store("41", "sync_error");
+ await history.store("43", "up_to_date");
+
+ let l = await history.list();
+ Assert.equal(l.length, 3);
+
+ await history.store("44", "success");
+ await anotherHistory.store("44", "success");
+
+ l = await history.list();
+ Assert.equal(l.length, 3);
+ Assert.ok(!l.map(e => e.timestamp).includes(41));
+
+ l = await anotherHistory.list();
+ Assert.equal(l.length, 1);
+ }
+);
+add_task(clear_state);
+
+add_task(async function test_entries_are_sorted_by_timestamp_desc() {
+ const history = new SyncHistory("settings-sync");
+ await history.store("42", "success");
+ await history.store("41", "sync_error");
+ await history.store("44", "up_to_date");
+
+ const l = await history.list();
+
+ Assert.deepEqual(
+ l.map(e => e.timestamp),
+ [44, 42, 41]
+ );
+});
+add_task(clear_state);
diff --git a/services/settings/test/unit/test_remote_settings_utils.js b/services/settings/test/unit/test_remote_settings_utils.js
new file mode 100644
index 0000000000..e2b2449f34
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_utils.js
@@ -0,0 +1,164 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { TestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+
+const BinaryOutputStream = Components.Constructor(
+ "@mozilla.org/binaryoutputstream;1",
+ "nsIBinaryOutputStream",
+ "setOutputStream"
+);
+
+const server = new HttpServer();
+server.start(-1);
+registerCleanupFunction(() => server.stop(() => {}));
+const SERVER_BASE_URL = `http://localhost:${server.identity.primaryPort}`;
+
+const proxyServer = new HttpServer();
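+// Let the proxy server recognize requests addressed to the test server's
+// host and port, as it would receive them when acting as an HTTP proxy.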
+proxyServer.identity.add("http", "localhost", server.identity.primaryPort);
+proxyServer.start(-1);
+registerCleanupFunction(() => proxyServer.stop(() => {}));
+const PROXY_PORT = proxyServer.identity.primaryPort;
+
+// A sequence of bytes that would become garbage if it were to be read as UTF-8:
+// - 0xEF 0xBB 0xBF is a byte order mark.
+// - 0xC0 on its own is invalid (it's the first byte of a 2-byte encoding).
+const INVALID_UTF_8_BYTES = [0xef, 0xbb, 0xbf, 0xc0];
+
+server.registerPathHandler("/binary.dat", (request, response) => {
+ response.setStatusLine(null, 201, "StatusLineHere");
+ response.setHeader("headerName", "HeaderValue: HeaderValueEnd");
+ let binaryOut = new BinaryOutputStream(response.bodyOutputStream);
+ binaryOut.writeByteArray(INVALID_UTF_8_BYTES);
+});
+
+// HTTPS requests are proxied with CONNECT, but our test server is HTTP,
+// which means that the proxy will receive GET http://localhost:port.
+var proxiedCount = 0;
+proxyServer.registerPrefixHandler("/", (request, response) => {
+ ++proxiedCount;
+ Assert.equal(request.path, "/binary.dat", `Proxy request ${proxiedCount}`);
+ // Close connection without sending any response.
+ response.seizePower();
+ response.finish();
+});
+
+add_task(async function test_utils_fetch_binary() {
+ let res = await Utils.fetch(`${SERVER_BASE_URL}/binary.dat`);
+
+ Assert.equal(res.status, 201, "res.status");
+ Assert.equal(res.statusText, "StatusLineHere", "res.statusText");
+ Assert.equal(
+ res.headers.get("headerName"),
+ "HeaderValue: HeaderValueEnd",
+ "Utils.fetch should return the header"
+ );
+
+ Assert.deepEqual(
+ Array.from(new Uint8Array(await res.arrayBuffer())),
+ INVALID_UTF_8_BYTES,
+ "Binary response body should be returned as is"
+ );
+});
+
+add_task(async function test_utils_fetch_binary_as_text() {
+ let res = await Utils.fetch(`${SERVER_BASE_URL}/binary.dat`);
+ Assert.deepEqual(
+ Array.from(await res.text(), c => c.charCodeAt(0)),
+ [65533],
+ "Interpreted as UTF-8, the response becomes garbage"
+ );
+});
+
+add_task(async function test_utils_fetch_binary_as_json() {
+ let res = await Utils.fetch(`${SERVER_BASE_URL}/binary.dat`);
+ await Assert.rejects(
+ res.json(),
+ /SyntaxError: JSON.parse: unexpected character/,
+ "Binary data is invalid JSON"
+ );
+});
+
+add_task(async function test_utils_fetch_has_conservative() {
+ let channelPromise = TestUtils.topicObserved("http-on-modify-request");
+ await Utils.fetch(`${SERVER_BASE_URL}/binary.dat`);
+
+ let channel = (await channelPromise)[0].QueryInterface(Ci.nsIHttpChannel);
+
+ Assert.equal(channel.URI.spec, `${SERVER_BASE_URL}/binary.dat`, "URL OK");
+
+ let internalChannel = channel.QueryInterface(Ci.nsIHttpChannelInternal);
+ Assert.ok(internalChannel.beConservative, "beConservative flag is set");
+});
+
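+// The following task exercises each proxy failover layer in turn: HttpChannel's
+// built-in failover, the ServiceRequest/Utils.fetch bypass (both automatic and
+// via the bypassProxy option), and finally a request with every failover disabled.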
+add_task(async function test_utils_fetch_with_bad_proxy() {
+ Services.prefs.setIntPref("network.proxy.type", 1);
+ Services.prefs.setStringPref("network.proxy.http", "127.0.0.1");
+ Services.prefs.setIntPref("network.proxy.http_port", PROXY_PORT);
+ Services.prefs.setBoolPref("network.proxy.allow_hijacking_localhost", true);
+
+ // The URL that we're going to request.
+ const DESTINATION_URL = `${SERVER_BASE_URL}/binary.dat`;
+
+ Assert.equal(proxiedCount, 0, "Proxy not used yet");
+ {
+ info("Bad proxy, default prefs");
+ let res = await Utils.fetch(DESTINATION_URL);
+ Assert.equal(res.status, 201, "Bypassed bad proxy");
+ // 10 instead of 1 because of reconnect attempts after a dropped request.
+ Assert.equal(proxiedCount, 10, "Proxy was used by HttpChannel");
+ }
+
+ // Disables the failover logic from HttpChannel.
+ Services.prefs.setBoolPref("network.proxy.failover_direct", false);
+ proxiedCount = 0;
+ {
+ info("Bad proxy, disabled network.proxy.failover_direct");
+ let res = await Utils.fetch(DESTINATION_URL);
+ Assert.equal(res.status, 201, "Bypassed bad proxy");
+ // 10 instead of 1 because of reconnect attempts after a dropped request.
+ Assert.equal(proxiedCount, 10, "Proxy was used by ServiceRequest");
+ }
+
+ proxiedCount = 0;
+ {
+ info("Using internal option of Utils.fetch: bypassProxy=true");
+ let res = await Utils.fetch(DESTINATION_URL, { bypassProxy: true });
+ Assert.equal(res.status, 201, "Bypassed bad proxy");
+ Assert.equal(proxiedCount, 0, "Not using proxy when bypassProxy=true");
+ }
+
+ // Disables the failover logic from ServiceRequest/Utils.fetch
+ Services.prefs.setBoolPref("network.proxy.allow_bypass", false);
+ proxiedCount = 0;
+
+ info("Bad proxy, disabled network.proxy.allow_bypass");
+ await Assert.rejects(
+ Utils.fetch(DESTINATION_URL),
+ /NetworkError/,
+ "Bad proxy request should fail without failover"
+ );
+ // 10 instead of 1 because of reconnect attempts after a dropped request.
+ Assert.equal(proxiedCount, 10, "Attempted to use proxy again");
+
+ Services.prefs.clearUserPref("network.proxy.type");
+ Services.prefs.clearUserPref("network.proxy.http");
+ Services.prefs.clearUserPref("network.proxy.http_port");
+ Services.prefs.clearUserPref("network.proxy.allow_hijacking_localhost");
+ Services.prefs.clearUserPref("network.proxy.failover_direct");
+ Services.prefs.clearUserPref("network.proxy.allow_bypass");
+});
diff --git a/services/settings/test/unit/test_remote_settings_utils_telemetry.js b/services/settings/test/unit/test_remote_settings_utils_telemetry.js
new file mode 100644
index 0000000000..2a4ee71135
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_utils_telemetry.js
@@ -0,0 +1,88 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { TelemetryController } = ChromeUtils.importESModule(
+ "resource://gre/modules/TelemetryController.sys.mjs"
+);
+const { TelemetryTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TelemetryTestUtils.sys.mjs"
+);
+const { Utils } = ChromeUtils.import("resource://services-settings/Utils.jsm");
+
+const server = new HttpServer();
+server.start(-1);
+registerCleanupFunction(() => server.stop(() => {}));
+const SERVER_BASE_URL = `http://localhost:${server.identity.primaryPort}`;
+
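+// Start and immediately stop a server: nothing is left listening on PROXY_PORT,
+// so it can serve as the address of a broken proxy in the tests below.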
+const proxyServer = new HttpServer();
+proxyServer.start(-1);
+const PROXY_PORT = proxyServer.identity.primaryPort;
+proxyServer.stop();
+
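+// The destination replies with an arbitrary but recognizable status code (412),
+// so the assertions below can tell that a request actually reached this server.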
+server.registerPathHandler("/destination", (request, response) => {
+ response.setStatusLine(null, 412);
+});
+
+async function assertTelemetryEvents(expectedEvents) {
+ await TelemetryTestUtils.assertEvents(expectedEvents, {
+ category: "service_request",
+ method: "bypass",
+ });
+}
+
+add_task(async function setup() {
+ await TelemetryController.testSetup();
+});
+
+add_task(async function test_telemetry() {
+ const DESTINATION_URL = `${SERVER_BASE_URL}/destination`;
+
+ {
+ let res = await Utils.fetch(DESTINATION_URL);
+ Assert.equal(res.status, 412, "fetch without proxy succeeded");
+ }
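+  // No proxy is configured yet, so no bypass telemetry event is expected.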
+ await assertTelemetryEvents([]);
+
+ Services.prefs.setIntPref("network.proxy.type", 1);
+ Services.prefs.setStringPref("network.proxy.http", "127.0.0.1");
+ Services.prefs.setIntPref("network.proxy.http_port", PROXY_PORT);
+ Services.prefs.setBoolPref("network.proxy.allow_hijacking_localhost", true);
+
+ {
+ let res = await Utils.fetch(DESTINATION_URL);
+ Assert.equal(res.status, 412, "fetch with broken proxy succeeded");
+ }
+  // Note: the failover to a direct connection happens inside HttpChannel here,
+  // so Utils.fetch did not bypass the proxy and no telemetry event is recorded.
+ await assertTelemetryEvents([]);
+
+ // Disable HttpChannel failover in favor of Utils.fetch's implementation.
+ Services.prefs.setBoolPref("network.proxy.failover_direct", false);
+ {
+ let res = await Utils.fetch(DESTINATION_URL);
+ Assert.equal(res.status, 412, "fetch succeeded with bypassProxy feature");
+ }
+ await assertTelemetryEvents([
+ {
+ category: "service_request",
+ method: "bypass",
+ object: "proxy_info",
+ value: "remote-settings",
+ extra: {
+ source: "prefs",
+ type: "manual",
+ },
+ },
+ ]);
+
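+  // Disable the failover logic from ServiceRequest/Utils.fetch as well: with
+  // both failover mechanisms off, the request can no longer succeed.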
+ Services.prefs.setBoolPref("network.proxy.allow_bypass", false);
+ await Assert.rejects(
+ Utils.fetch(DESTINATION_URL),
+ /NetworkError/,
+ "Request without failover fails"
+ );
+ await assertTelemetryEvents([]);
+});
diff --git a/services/settings/test/unit/test_remote_settings_worker.js b/services/settings/test/unit/test_remote_settings_worker.js
new file mode 100644
index 0000000000..fb600e91ce
--- /dev/null
+++ b/services/settings/test/unit/test_remote_settings_worker.js
@@ -0,0 +1,140 @@
+/* import-globals-from ../../../common/tests/unit/head_helpers.js */
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+const { TestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+const { RemoteSettingsWorker } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsWorker.jsm"
+);
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+const { Database } = ChromeUtils.import(
+ "resource://services-settings/Database.jsm"
+);
+
+XPCOMUtils.defineLazyGlobalGetters(this, ["indexedDB"]);
+
+const IS_ANDROID = AppConstants.platform == "android";
+
+add_task(async function test_canonicaljson() {
+ const records = [
+ { id: "1", title: "title 1" },
+ { id: "2", title: "title 2" },
+ ];
+ const timestamp = 42;
+
+ const serialized = await RemoteSettingsWorker.canonicalStringify(
+ records,
+ timestamp
+ );
+
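+  // The canonical form wraps the records in a "data" property and serializes
+  // the timestamp as a string, as asserted below.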
+ Assert.equal(
+ serialized,
+ '{"data":[{"id":"1","title":"title 1"},{"id":"2","title":"title 2"}],"last_modified":"42"}'
+ );
+});
+
+add_task(async function test_import_json_dump_into_idb() {
+ if (IS_ANDROID) {
+ // Skip test: we don't ship remote settings dumps on Android (see package-manifest).
+ return;
+ }
+ const client = new RemoteSettingsClient("language-dictionaries");
+ const before = await client.get({ syncIfEmpty: false });
+ Assert.equal(before.length, 0);
+
+ await RemoteSettingsWorker.importJSONDump("main", "language-dictionaries");
+
+ const after = await client.get({ syncIfEmpty: false });
+ Assert.ok(!!after.length);
+ let lastModifiedStamp = await client.getLastModified();
+
+ Assert.equal(
+ lastModifiedStamp,
+ Math.max(...after.map(record => record.last_modified)),
+ "Should have correct last modified timestamp"
+ );
+
+ // Force a DB close for shutdown so we can delete the DB later.
+ Database._shutdownHandler();
+});
+
+add_task(async function test_throws_error_if_worker_fails() {
+ let error;
+ try {
+ await RemoteSettingsWorker.canonicalStringify(null, 42);
+ } catch (e) {
+ error = e;
+ }
+ Assert.equal(error.message.endsWith("records is null"), true);
+});
+
+add_task(async function test_throws_error_if_worker_fails_async() {
+ if (IS_ANDROID) {
+    // Skip test: we don't ship dumps, so importJSONDump() is a no-op.
+ return;
+ }
+ // Delete the Remote Settings database, and try to import a dump.
+ // This is not supported, and the error thrown asynchronously in the worker
+ // should be reported to the caller.
+ await new Promise((resolve, reject) => {
+ const request = indexedDB.deleteDatabase("remote-settings");
+ request.onsuccess = event => resolve();
+ request.onblocked = event => reject(new Error("Cannot delete DB"));
+ request.onerror = event => reject(event.target.error);
+ });
+ let error;
+ try {
+ await RemoteSettingsWorker.importJSONDump("main", "language-dictionaries");
+ } catch (e) {
+ error = e;
+ }
+ Assert.ok(/IndexedDB: Error accessing remote-settings/.test(error.message));
+});
+
+add_task(async function test_throws_error_if_worker_crashes() {
+ // This simulates a crash at the worker level (not within a promise).
+ let error;
+ try {
+ await RemoteSettingsWorker._execute("unknown_method");
+ } catch (e) {
+ error = e;
+ }
+ Assert.equal(error.message, "TypeError: Agent[method] is not a function");
+});
+
+add_task(async function test_stops_worker_after_timeout() {
+ // Change the idle time.
+ Services.prefs.setIntPref(
+ "services.settings.worker_idle_max_milliseconds",
+ 1
+ );
+ // Run a task:
+ let serialized = await RemoteSettingsWorker.canonicalStringify([], 42);
+ Assert.equal(serialized, '{"data":[],"last_modified":"42"}', "API works.");
+  // Check that the worker gets stopped now that the task is done:
+ await TestUtils.waitForCondition(() => !RemoteSettingsWorker.worker);
+ // Ensure the worker stays alive for 10 minutes instead:
+ Services.prefs.setIntPref(
+ "services.settings.worker_idle_max_milliseconds",
+ 600000
+ );
+ // Run another task:
+ serialized = await RemoteSettingsWorker.canonicalStringify([], 42);
+ Assert.equal(
+ serialized,
+ '{"data":[],"last_modified":"42"}',
+ "API still works."
+ );
+ Assert.ok(RemoteSettingsWorker.worker, "Worker should stay alive a bit.");
+
+ // Clear the pref.
+ Services.prefs.clearUserPref(
+ "services.settings.worker_idle_max_milliseconds"
+ );
+});
diff --git a/services/settings/test/unit/test_shutdown_handling.js b/services/settings/test/unit/test_shutdown_handling.js
new file mode 100644
index 0000000000..28a1765267
--- /dev/null
+++ b/services/settings/test/unit/test_shutdown_handling.js
@@ -0,0 +1,139 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+const { TestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+const { Database } = ChromeUtils.import(
+ "resource://services-settings/Database.jsm"
+);
+const { RemoteSettingsWorker } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsWorker.jsm"
+);
+const { RemoteSettingsClient } = ChromeUtils.import(
+ "resource://services-settings/RemoteSettingsClient.jsm"
+);
+
+add_task(async function test_shutdown_abort_after_start() {
+ // Start a forever transaction:
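+  // (IndexedDB commits a transaction as soon as no requests are pending, so the
+  // callback below keeps issuing cursor requests to hold it open.)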
+ let counter = 0;
+ let transactionStarted;
+ let startedPromise = new Promise(r => {
+ transactionStarted = r;
+ });
+ let promise = Database._executeIDB(
+ "records",
+ store => {
+ // Signal we've started.
+ transactionStarted();
+ function makeRequest() {
+ if (++counter > 1000) {
+ Assert.ok(
+ false,
+ "We ran 1000 requests and didn't get aborted, what?"
+ );
+ return;
+ }
+ dump("Making request " + counter + "\n");
+ const request = store
+ .index("cid")
+ .openCursor(IDBKeyRange.only("foopydoo/foo"));
+ request.onsuccess = event => {
+ makeRequest();
+ };
+ }
+ makeRequest();
+ },
+ { mode: "readonly" }
+ );
+
+ // Wait for the transaction to start.
+ await startedPromise;
+
+ Database._shutdownHandler(); // should abort the readonly transaction.
+
+ let rejection;
+ await promise.catch(e => {
+ rejection = e;
+ });
+ ok(rejection, "Promise should have rejected.");
+
+ // Now clear the shutdown flag and rejection error:
+ Database._cancelShutdown();
+ rejection = null;
+});
+
+add_task(async function test_shutdown_immediate_abort() {
+ // Now abort directly from the successful request.
+ let promise = Database._executeIDB(
+ "records",
+ store => {
+ let request = store
+ .index("cid")
+ .openCursor(IDBKeyRange.only("foopydoo/foo"));
+ request.onsuccess = event => {
+ // Abort immediately.
+ Database._shutdownHandler();
+ request = store
+ .index("cid")
+ .openCursor(IDBKeyRange.only("foopydoo/foo"));
+ Assert.ok(false, "IndexedDB allowed opening a cursor after aborting?!");
+ };
+ },
+ { mode: "readonly" }
+ );
+
+ let rejection;
+ // Wait for the abort
+ await promise.catch(e => {
+ rejection = e;
+ });
+ ok(rejection, "Directly aborted promise should also have rejected.");
+ // Now clear the shutdown flag and rejection error:
+ Database._cancelShutdown();
+});
+
+add_task(async function test_shutdown_worker() {
+ let client = new RemoteSettingsClient("language-dictionaries");
+ const before = await client.get({ syncIfEmpty: false });
+ Assert.equal(before.length, 0);
+
+ let records = [{}];
+ let importPromise = RemoteSettingsWorker._execute(
+ "_test_only_import",
+ ["main", "language-dictionaries", records, 0],
+ { mustComplete: true }
+ );
+  let stringifyPromise = RemoteSettingsWorker.canonicalStringify(
+    [],
+    Date.now()
+  );
+ // Change the idle time so we shut the worker down even though we can't
+ // set gShutdown from outside of the worker management code.
+ Services.prefs.setIntPref(
+ "services.settings.worker_idle_max_milliseconds",
+ 1
+ );
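+  // Aborting the cancelable requests should make the pending stringify reject,
+  // as it would at shutdown.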
+ RemoteSettingsWorker._abortCancelableRequests();
+ await Assert.rejects(
+ stringifyPromise,
+ /Shutdown/,
+ "Should have aborted the stringify request at shutdown."
+ );
+ await Assert.rejects(
+ importPromise,
+ /shutting down/,
+ "Ensure imports get aborted during shutdown"
+ );
+ const after = await client.get({ syncIfEmpty: false });
+ Assert.equal(after.length, 0);
+ await TestUtils.waitForCondition(() => !RemoteSettingsWorker.worker);
+ Assert.ok(
+ !RemoteSettingsWorker.worker,
+ "Worker should have been terminated."
+ );
+});
diff --git a/services/settings/test/unit/xpcshell.ini b/services/settings/test/unit/xpcshell.ini
new file mode 100644
index 0000000000..1e5df0a248
--- /dev/null
+++ b/services/settings/test/unit/xpcshell.ini
@@ -0,0 +1,24 @@
+[DEFAULT]
+head = ../../../common/tests/unit/head_global.js ../../../common/tests/unit/head_helpers.js
+firefox-appdir = browser
+tags = remote-settings
+support-files =
+ test_remote_settings_signatures/**
+skip-if = appname == "thunderbird" # Bug 1662758 - these tests don't pass if default bucket isn't "main".
+
+[test_attachments_downloader.js]
+support-files = test_attachments_downloader/**
+[test_remote_settings.js]
+[test_remote_settings_dump_lastmodified.js]
+[test_remote_settings_offline.js]
+[test_remote_settings_poll.js]
+[test_remote_settings_recover_broken.js]
+[test_remote_settings_worker.js]
+[test_remote_settings_jexl_filters.js]
+[test_remote_settings_release_prefs.js]
+[test_remote_settings_signatures.js]
+[test_remote_settings_sync_history.js]
+[test_remote_settings_utils.js]
+[test_remote_settings_utils_telemetry.js]
+skip-if = os == "android" # bug 1739463
+[test_shutdown_handling.js]