Diffstat (limited to 'services')
38 files changed, 1093 insertions, 1082 deletions
diff --git a/services/common/logmanager.sys.mjs b/services/common/logmanager.sys.mjs deleted file mode 100644 index 724cfde38b..0000000000 --- a/services/common/logmanager.sys.mjs +++ /dev/null @@ -1,447 +0,0 @@ -/* This Source Code Form is subject to the terms of the Mozilla Public - * License, v. 2.0. If a copy of the MPL was not distributed with this - * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -"use strict;"; - -import { Log } from "resource://gre/modules/Log.sys.mjs"; - -const lazy = {}; - -ChromeUtils.defineESModuleGetters(lazy, { - FileUtils: "resource://gre/modules/FileUtils.sys.mjs", - NetUtil: "resource://gre/modules/NetUtil.sys.mjs", -}); - -const DEFAULT_MAX_ERROR_AGE = 20 * 24 * 60 * 60; // 20 days - -// "shared" logs (ie, where the same log name is used by multiple LogManager -// instances) are a fact of life here - eg, FirefoxAccounts logs are used by -// both Sync and Reading List. -// However, different instances have different pref branches, so we need to -// handle when one pref branch says "Debug" and the other says "Error" -// So we (a) keep singleton console and dump appenders and (b) keep track -// of the minimum (ie, most verbose) level and use that. -// This avoids (a) the most recent setter winning (as that is indeterminate) -// and (b) multiple dump/console appenders being added to the same log multiple -// times, which would cause messages to appear twice. - -// Singletons used by each instance. -var formatter; -var dumpAppender; -var consoleAppender; - -// A set of all preference roots used by all instances. -var allBranches = new Set(); - -const STREAM_SEGMENT_SIZE = 4096; -const PR_UINT32_MAX = 0xffffffff; - -/** - * Append to an nsIStorageStream - * - * This writes logging output to an in-memory stream which can later be read - * back as an nsIInputStream. It can be used to avoid expensive I/O operations - * during logging. Instead, one can periodically consume the input stream and - * e.g. write it to disk asynchronously. - */ -class StorageStreamAppender extends Log.Appender { - constructor(formatter) { - super(formatter); - this._name = "StorageStreamAppender"; - - this._converterStream = null; // holds the nsIConverterOutputStream - this._outputStream = null; // holds the underlying nsIOutputStream - - this._ss = null; - } - - get outputStream() { - if (!this._outputStream) { - // First create a raw stream. We can bail out early if that fails. - this._outputStream = this.newOutputStream(); - if (!this._outputStream) { - return null; - } - - // Wrap the raw stream in an nsIConverterOutputStream. We can reuse - // the instance if we already have one. 
- if (!this._converterStream) { - this._converterStream = Cc[ - "@mozilla.org/intl/converter-output-stream;1" - ].createInstance(Ci.nsIConverterOutputStream); - } - this._converterStream.init(this._outputStream, "UTF-8"); - } - return this._converterStream; - } - - newOutputStream() { - let ss = (this._ss = Cc["@mozilla.org/storagestream;1"].createInstance( - Ci.nsIStorageStream - )); - ss.init(STREAM_SEGMENT_SIZE, PR_UINT32_MAX, null); - return ss.getOutputStream(0); - } - - getInputStream() { - if (!this._ss) { - return null; - } - return this._ss.newInputStream(0); - } - - reset() { - if (!this._outputStream) { - return; - } - this.outputStream.close(); - this._outputStream = null; - this._ss = null; - } - - doAppend(formatted) { - if (!formatted) { - return; - } - try { - this.outputStream.writeString(formatted + "\n"); - } catch (ex) { - if (ex.result == Cr.NS_BASE_STREAM_CLOSED) { - // The underlying output stream is closed, so let's open a new one - // and try again. - this._outputStream = null; - } - try { - this.outputStream.writeString(formatted + "\n"); - } catch (ex) { - // Ah well, we tried, but something seems to be hosed permanently. - } - } - } -} - -// A storage appender that is flushable to a file on disk. Policies for -// when to flush, to what file, log rotation etc are up to the consumer -// (although it does maintain a .sawError property to help the consumer decide -// based on its policies) -class FlushableStorageAppender extends StorageStreamAppender { - constructor(formatter) { - super(formatter); - this.sawError = false; - } - - append(message) { - if (message.level >= Log.Level.Error) { - this.sawError = true; - } - StorageStreamAppender.prototype.append.call(this, message); - } - - reset() { - super.reset(); - this.sawError = false; - } - - // Flush the current stream to a file. Somewhat counter-intuitively, you - // must pass a log which will be written to with details of the operation. - async flushToFile(subdirArray, filename, log) { - let inStream = this.getInputStream(); - this.reset(); - if (!inStream) { - log.debug("Failed to flush log to a file - no input stream"); - return; - } - log.debug("Flushing file log"); - log.trace("Beginning stream copy to " + filename + ": " + Date.now()); - try { - await this._copyStreamToFile(inStream, subdirArray, filename, log); - log.trace("onCopyComplete", Date.now()); - } catch (ex) { - log.error("Failed to copy log stream to file", ex); - } - } - - /** - * Copy an input stream to the named file, doing everything off the main - * thread. - * subDirArray is an array of path components, relative to the profile - * directory, where the file will be created. - * outputFileName is the filename to create. - * Returns a promise that is resolved on completion or rejected with an error. - */ - async _copyStreamToFile(inputStream, subdirArray, outputFileName, log) { - let outputDirectory = PathUtils.join(PathUtils.profileDir, ...subdirArray); - await IOUtils.makeDirectory(outputDirectory); - let fullOutputFileName = PathUtils.join(outputDirectory, outputFileName); - - let outputStream = Cc[ - "@mozilla.org/network/file-output-stream;1" - ].createInstance(Ci.nsIFileOutputStream); - - outputStream.init( - new lazy.FileUtils.File(fullOutputFileName), - -1, - -1, - Ci.nsIFileOutputStream.DEFER_OPEN - ); - - await new Promise(resolve => - lazy.NetUtil.asyncCopy(inputStream, outputStream, () => resolve()) - ); - - outputStream.close(); - log.trace("finished copy to", fullOutputFileName); - } -} - -// The public LogManager object. 
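
For reference, a minimal consumer sketch of this LogManager as it existed prior to this change, based on the deleted implementation and the deleted tests further down; the pref branch and log name are placeholders, not a real consumer. The constructor attaches a console, dump and flushable file appender to each named log, with levels driven by the log.appender.console, log.appender.dump and log.appender.file.level prefs under the given branch.

import { Log } from "resource://gre/modules/Log.sys.mjs";
import { LogManager } from "resource://services-common/logmanager.sys.mjs";

// Placeholder branch and log name, for illustration only.
const logManager = new LogManager("my-feature.", ["MyFeatureLog"], "my-feature");
const log = Log.repository.getLogger("MyFeatureLog");
log.info("recorded by all three appenders, subject to their pref-controlled levels");
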
-export function LogManager(prefRoot, logNames, logFilePrefix) { - this._prefObservers = []; - this.init(prefRoot, logNames, logFilePrefix); -} - -LogManager.StorageStreamAppender = StorageStreamAppender; - -LogManager.prototype = { - _cleaningUpFileLogs: false, - - init(prefRoot, logNames, logFilePrefix) { - this._prefs = Services.prefs.getBranch(prefRoot); - this._prefsBranch = prefRoot; - - this.logFilePrefix = logFilePrefix; - if (!formatter) { - // Create a formatter and various appenders to attach to the logs. - formatter = new Log.BasicFormatter(); - consoleAppender = new Log.ConsoleAppender(formatter); - dumpAppender = new Log.DumpAppender(formatter); - } - - allBranches.add(this._prefsBranch); - // We create a preference observer for all our prefs so they are magically - // reflected if the pref changes after creation. - let setupAppender = ( - appender, - prefName, - defaultLevel, - findSmallest = false - ) => { - let observer = newVal => { - let level = Log.Level[newVal] || defaultLevel; - if (findSmallest) { - // As some of our appenders have global impact (ie, there is only one - // place 'dump' goes to), we need to find the smallest value from all - // prefs controlling this appender. - // For example, if consumerA has dump=Debug then consumerB sets - // dump=Error, we need to keep dump=Debug so consumerA is respected. - for (let branch of allBranches) { - let lookPrefBranch = Services.prefs.getBranch(branch); - let lookVal = - Log.Level[lookPrefBranch.getStringPref(prefName, null)]; - if (lookVal && lookVal < level) { - level = lookVal; - } - } - } - appender.level = level; - }; - this._prefs.addObserver(prefName, observer); - this._prefObservers.push([prefName, observer]); - // and call the observer now with the current pref value. - observer(this._prefs.getStringPref(prefName, null)); - return observer; - }; - - this._observeConsolePref = setupAppender( - consoleAppender, - "log.appender.console", - Log.Level.Fatal, - true - ); - this._observeDumpPref = setupAppender( - dumpAppender, - "log.appender.dump", - Log.Level.Error, - true - ); - - // The file appender doesn't get the special singleton behaviour. - let fapp = (this._fileAppender = new FlushableStorageAppender(formatter)); - // the stream gets a default of Debug as the user must go out of their way - // to see the stuff spewed to it. - this._observeStreamPref = setupAppender( - fapp, - "log.appender.file.level", - Log.Level.Debug - ); - - // now attach the appenders to all our logs. - for (let logName of logNames) { - let log = Log.repository.getLogger(logName); - for (let appender of [fapp, dumpAppender, consoleAppender]) { - log.addAppender(appender); - } - } - // and use the first specified log as a "root" for our log. - this._log = Log.repository.getLogger(logNames[0] + ".LogManager"); - }, - - /** - * Cleanup this instance - */ - finalize() { - for (let [prefName, observer] of this._prefObservers) { - this._prefs.removeObserver(prefName, observer); - } - this._prefObservers = []; - try { - allBranches.delete(this._prefsBranch); - } catch (e) {} - this._prefs = null; - }, - - get _logFileSubDirectoryEntries() { - // At this point we don't allow a custom directory for the logs, nor allow - // it to be outside the profile directory. - // This returns an array of the the relative directory entries below the - // profile dir, and is the directory about:sync-log uses. - return ["weave", "logs"]; - }, - - get sawError() { - return this._fileAppender.sawError; - }, - - // Result values for resetFileLog. 
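
A sketch of how the resetFileLog() result values are meant to be consumed, following the doc comment and the deleted tests below; logManager is assumed to be an existing instance of this class.

const reason = await logManager.resetFileLog();
if (reason === logManager.ERROR_LOG_WRITTEN) {
  // an "error-<logFilePrefix>-<timestamp>.txt" file was flushed to <profile>/weave/logs
} else if (reason === logManager.SUCCESS_LOG_WRITTEN) {
  // a "success-<logFilePrefix>-<timestamp>.txt" file was written because
  // log.appender.file.logOnSuccess is true
} else {
  // null: nothing was written (flushing disabled for this outcome, or flushing failed)
}
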
- SUCCESS_LOG_WRITTEN: "success-log-written", - ERROR_LOG_WRITTEN: "error-log-written", - - /** - * Possibly generate a log file for all accumulated log messages and refresh - * the input & output streams. - * Whether a "success" or "error" log is written is determined based on - * whether an "Error" log entry was written to any of the logs. - * Returns a promise that resolves on completion with either null (for no - * file written or on error), SUCCESS_LOG_WRITTEN if a "success" log was - * written, or ERROR_LOG_WRITTEN if an "error" log was written. - */ - async resetFileLog() { - try { - let flushToFile; - let reasonPrefix; - let reason; - if (this._fileAppender.sawError) { - reason = this.ERROR_LOG_WRITTEN; - flushToFile = this._prefs.getBoolPref( - "log.appender.file.logOnError", - true - ); - reasonPrefix = "error"; - } else { - reason = this.SUCCESS_LOG_WRITTEN; - flushToFile = this._prefs.getBoolPref( - "log.appender.file.logOnSuccess", - false - ); - reasonPrefix = "success"; - } - - // might as well avoid creating an input stream if we aren't going to use it. - if (!flushToFile) { - this._fileAppender.reset(); - return null; - } - - // We have reasonPrefix at the start of the filename so all "error" - // logs are grouped in about:sync-log. - let filename = - reasonPrefix + "-" + this.logFilePrefix + "-" + Date.now() + ".txt"; - await this._fileAppender.flushToFile( - this._logFileSubDirectoryEntries, - filename, - this._log - ); - // It's not completely clear to markh why we only do log cleanups - // for errors, but for now the Sync semantics have been copied... - // (one theory is that only cleaning up on error makes it less - // likely old error logs would be removed, but that's not true if - // there are occasional errors - let's address this later!) - if (reason == this.ERROR_LOG_WRITTEN && !this._cleaningUpFileLogs) { - this._log.trace("Running cleanup."); - try { - await this.cleanupLogs(); - } catch (err) { - this._log.error("Failed to cleanup logs", err); - } - } - return reason; - } catch (ex) { - this._log.error("Failed to resetFileLog", ex); - return null; - } - }, - - /** - * Finds all logs older than maxErrorAge and deletes them using async I/O. - */ - cleanupLogs() { - let maxAge = this._prefs.getIntPref( - "log.appender.file.maxErrorAge", - DEFAULT_MAX_ERROR_AGE - ); - let threshold = Date.now() - 1000 * maxAge; - this._log.debug("Log cleanup threshold time: " + threshold); - - let shouldDelete = fileInfo => { - return fileInfo.lastModified < threshold; - }; - return this._deleteLogFiles(shouldDelete); - }, - - /** - * Finds all logs and removes them. - */ - removeAllLogs() { - return this._deleteLogFiles(() => true); - }, - - // Delete some log files. A callback is invoked for each found log file to - // determine if that file should be removed. 
- async _deleteLogFiles(cbShouldDelete) { - this._cleaningUpFileLogs = true; - let logDir = lazy.FileUtils.getDir( - "ProfD", - this._logFileSubDirectoryEntries - ); - for (const path of await IOUtils.getChildren(logDir.path)) { - const name = PathUtils.filename(path); - - if (!name.startsWith("error-") && !name.startsWith("success-")) { - continue; - } - - try { - const info = await IOUtils.stat(path); - if (!cbShouldDelete(info)) { - continue; - } - - this._log.trace(` > Cleanup removing ${name} (${info.lastModified})`); - await IOUtils.remove(path); - this._log.trace(`Deleted ${name}`); - } catch (ex) { - this._log.debug( - `Encountered error trying to clean up old log file ${name}`, - ex - ); - } - } - this._cleaningUpFileLogs = false; - this._log.debug("Done deleting files."); - // This notification is used only for tests. - Services.obs.notifyObservers( - null, - "services-tests:common:log-manager:cleanup-logs" - ); - }, -}; diff --git a/services/common/moz.build b/services/common/moz.build index 144ccaff04..42c5431de8 100644 --- a/services/common/moz.build +++ b/services/common/moz.build @@ -22,7 +22,6 @@ EXTRA_JS_MODULES["services-common"] += [ "kinto-http-client.sys.mjs", "kinto-offline-client.sys.mjs", "kinto-storage-adapter.sys.mjs", - "logmanager.sys.mjs", "observers.sys.mjs", "rest.sys.mjs", "uptake-telemetry.sys.mjs", diff --git a/services/common/tests/unit/test_load_modules.js b/services/common/tests/unit/test_load_modules.js index d86165266f..f188acb4f5 100644 --- a/services/common/tests/unit/test_load_modules.js +++ b/services/common/tests/unit/test_load_modules.js @@ -6,12 +6,7 @@ const { AppConstants } = ChromeUtils.importESModule( ); const MODULE_BASE = "resource://services-common/"; -const shared_modules = [ - "async.sys.mjs", - "logmanager.sys.mjs", - "rest.sys.mjs", - "utils.sys.mjs", -]; +const shared_modules = ["async.sys.mjs", "rest.sys.mjs", "utils.sys.mjs"]; const non_android_modules = ["tokenserverclient.sys.mjs"]; @@ -51,6 +46,7 @@ function expectImportsToFail(mm, base = MODULE_BASE) { function run_test() { expectImportsToSucceed(shared_modules); expectImportsToSucceed(shared_test_modules, TEST_BASE); + expectImportsToSucceed(["LogManager.sys.mjs"], "resource://gre/modules/"); if (AppConstants.platform != "android") { expectImportsToSucceed(non_android_modules); diff --git a/services/common/tests/unit/test_logmanager.js b/services/common/tests/unit/test_logmanager.js deleted file mode 100644 index 89ac274e61..0000000000 --- a/services/common/tests/unit/test_logmanager.js +++ /dev/null @@ -1,330 +0,0 @@ -/* Any copyright is dedicated to the Public Domain. - http://creativecommons.org/publicdomain/zero/1.0/ */ - -// NOTE: The sync test_errorhandler_* tests have quite good coverage for -// other aspects of this. - -const { LogManager } = ChromeUtils.importESModule( - "resource://services-common/logmanager.sys.mjs" -); -const { FileUtils } = ChromeUtils.importESModule( - "resource://gre/modules/FileUtils.sys.mjs" -); - -// Returns an array of [consoleAppender, dumpAppender, [fileAppenders]] for -// the specified log. 
Note that fileAppenders will usually have length=1 -function getAppenders(log) { - let capps = log.appenders.filter(app => app instanceof Log.ConsoleAppender); - equal(capps.length, 1, "should only have one console appender"); - let dapps = log.appenders.filter(app => app instanceof Log.DumpAppender); - equal(dapps.length, 1, "should only have one dump appender"); - let fapps = log.appenders.filter( - app => app instanceof LogManager.StorageStreamAppender - ); - return [capps[0], dapps[0], fapps]; -} - -// Test that the correct thing happens when no prefs exist for the log manager. -add_task(async function test_noPrefs() { - // tell the log manager to init with a pref branch that doesn't exist. - let lm = new LogManager("no-such-branch.", ["TestLog"], "test"); - - let log = Log.repository.getLogger("TestLog"); - let [capp, dapp, fapps] = getAppenders(log); - // The console appender gets "Fatal" while the "dump" appender gets "Error" levels - equal(capp.level, Log.Level.Fatal); - equal(dapp.level, Log.Level.Error); - // and the file (stream) appender gets Debug by default - equal(fapps.length, 1, "only 1 file appender"); - equal(fapps[0].level, Log.Level.Debug); - lm.finalize(); -}); - -// Test that changes to the prefs used by the log manager are updated dynamically. -add_task(async function test_PrefChanges() { - Services.prefs.setStringPref( - "log-manager.test.log.appender.console", - "Trace" - ); - Services.prefs.setStringPref("log-manager.test.log.appender.dump", "Trace"); - Services.prefs.setStringPref( - "log-manager.test.log.appender.file.level", - "Trace" - ); - let lm = new LogManager("log-manager.test.", ["TestLog2"], "test"); - - let log = Log.repository.getLogger("TestLog2"); - let [capp, dapp, [fapp]] = getAppenders(log); - equal(capp.level, Log.Level.Trace); - equal(dapp.level, Log.Level.Trace); - equal(fapp.level, Log.Level.Trace); - // adjust the prefs and they should magically be reflected in the appenders. - Services.prefs.setStringPref( - "log-manager.test.log.appender.console", - "Debug" - ); - Services.prefs.setStringPref("log-manager.test.log.appender.dump", "Debug"); - Services.prefs.setStringPref( - "log-manager.test.log.appender.file.level", - "Debug" - ); - equal(capp.level, Log.Level.Debug); - equal(dapp.level, Log.Level.Debug); - equal(fapp.level, Log.Level.Debug); - // and invalid values should cause them to fallback to their defaults. - Services.prefs.setStringPref("log-manager.test.log.appender.console", "xxx"); - Services.prefs.setStringPref("log-manager.test.log.appender.dump", "xxx"); - Services.prefs.setStringPref( - "log-manager.test.log.appender.file.level", - "xxx" - ); - equal(capp.level, Log.Level.Fatal); - equal(dapp.level, Log.Level.Error); - equal(fapp.level, Log.Level.Debug); - lm.finalize(); -}); - -// Test that the same log used by multiple log managers does the right thing. -add_task(async function test_SharedLogs() { - // create the prefs for the first instance. - Services.prefs.setStringPref( - "log-manager-1.test.log.appender.console", - "Trace" - ); - Services.prefs.setStringPref("log-manager-1.test.log.appender.dump", "Trace"); - Services.prefs.setStringPref( - "log-manager-1.test.log.appender.file.level", - "Trace" - ); - let lm1 = new LogManager("log-manager-1.test.", ["TestLog3"], "test"); - - // and the second. 
- Services.prefs.setStringPref( - "log-manager-2.test.log.appender.console", - "Debug" - ); - Services.prefs.setStringPref("log-manager-2.test.log.appender.dump", "Debug"); - Services.prefs.setStringPref( - "log-manager-2.test.log.appender.file.level", - "Debug" - ); - let lm2 = new LogManager("log-manager-2.test.", ["TestLog3"], "test"); - - let log = Log.repository.getLogger("TestLog3"); - let [capp, dapp] = getAppenders(log); - - // console and dump appenders should be "trace" as it is more verbose than - // "debug" - equal(capp.level, Log.Level.Trace); - equal(dapp.level, Log.Level.Trace); - - // Set the prefs on the -1 branch to "Error" - it should then end up with - // "Debug" from the -2 branch. - Services.prefs.setStringPref( - "log-manager-1.test.log.appender.console", - "Error" - ); - Services.prefs.setStringPref("log-manager-1.test.log.appender.dump", "Error"); - Services.prefs.setStringPref( - "log-manager-1.test.log.appender.file.level", - "Error" - ); - - equal(capp.level, Log.Level.Debug); - equal(dapp.level, Log.Level.Debug); - - lm1.finalize(); - lm2.finalize(); -}); - -// A little helper to test what log files exist. We expect exactly zero (if -// prefix is null) or exactly one with the specified prefix. -function checkLogFile(prefix) { - let logsdir = FileUtils.getDir("ProfD", ["weave", "logs"]); - let entries = logsdir.directoryEntries; - if (!prefix) { - // expecting no files. - ok(!entries.hasMoreElements()); - } else { - // expecting 1 file. - ok(entries.hasMoreElements()); - let logfile = entries.getNext().QueryInterface(Ci.nsIFile); - equal(logfile.leafName.slice(-4), ".txt"); - ok(logfile.leafName.startsWith(prefix + "-test-"), logfile.leafName); - // and remove it ready for the next check. - logfile.remove(false); - } -} - -// Test that we correctly write error logs by default -add_task(async function test_logFileErrorDefault() { - let lm = new LogManager("log-manager.test.", ["TestLog2"], "test"); - - let log = Log.repository.getLogger("TestLog2"); - log.error("an error message"); - await lm.resetFileLog(lm.REASON_ERROR); - // One error log file exists. - checkLogFile("error"); - - lm.finalize(); -}); - -// Test that we correctly write success logs. -add_task(async function test_logFileSuccess() { - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnError", - false - ); - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - false - ); - - let lm = new LogManager("log-manager.test.", ["TestLog2"], "test"); - - let log = Log.repository.getLogger("TestLog2"); - log.info("an info message"); - await lm.resetFileLog(); - // Zero log files exist. - checkLogFile(null); - - // Reset logOnSuccess and do it again - log should appear. - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - true - ); - log.info("an info message"); - await lm.resetFileLog(); - - checkLogFile("success"); - - // Now test with no "reason" specified and no "error" record. - log.info("an info message"); - await lm.resetFileLog(); - // should get a "success" entry. - checkLogFile("success"); - - // With no "reason" and an error record - should get no success log. - log.error("an error message"); - await lm.resetFileLog(); - // should get no entry - checkLogFile(null); - - // And finally now with no error, to ensure that the fact we had an error - // previously doesn't persist after the .resetFileLog call. 
- log.info("an info message"); - await lm.resetFileLog(); - checkLogFile("success"); - - lm.finalize(); -}); - -// Test that we correctly write error logs. -add_task(async function test_logFileError() { - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnError", - false - ); - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - false - ); - - let lm = new LogManager("log-manager.test.", ["TestLog2"], "test"); - - let log = Log.repository.getLogger("TestLog2"); - log.info("an info message"); - let reason = await lm.resetFileLog(); - Assert.equal(reason, null, "null returned when no file created."); - // Zero log files exist. - checkLogFile(null); - - // Reset logOnSuccess - success logs should appear if no error records. - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - true - ); - log.info("an info message"); - reason = await lm.resetFileLog(); - Assert.equal(reason, lm.SUCCESS_LOG_WRITTEN); - checkLogFile("success"); - - // Set logOnError and unset logOnSuccess - error logs should appear. - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - false - ); - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnError", - true - ); - log.error("an error message"); - reason = await lm.resetFileLog(); - Assert.equal(reason, lm.ERROR_LOG_WRITTEN); - checkLogFile("error"); - - // Now test with no "error" record. - log.info("an info message"); - reason = await lm.resetFileLog(); - // should get no file - Assert.equal(reason, null); - checkLogFile(null); - - // With an error record we should get an error log. - log.error("an error message"); - reason = await lm.resetFileLog(); - // should get en error log - Assert.equal(reason, lm.ERROR_LOG_WRITTEN); - checkLogFile("error"); - - // And finally now with success, to ensure that the fact we had an error - // previously doesn't persist after the .resetFileLog call. - log.info("an info message"); - await lm.resetFileLog(); - checkLogFile(null); - - lm.finalize(); -}); - -function countLogFiles() { - let logsdir = FileUtils.getDir("ProfD", ["weave", "logs"]); - let count = 0; - for (let entry of logsdir.directoryEntries) { - void entry; - count += 1; - } - return count; -} - -// Test that removeAllLogs removes all log files. 
-add_task(async function test_logFileError() { - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnError", - true - ); - Services.prefs.setBoolPref( - "log-manager.test.log.appender.file.logOnSuccess", - true - ); - - let lm = new LogManager("log-manager.test.", ["TestLog2"], "test"); - - let log = Log.repository.getLogger("TestLog2"); - log.info("an info message"); - let reason = await lm.resetFileLog(); - Assert.equal(reason, lm.SUCCESS_LOG_WRITTEN, "success log was written."); - - log.error("an error message"); - reason = await lm.resetFileLog(); - Assert.equal(reason, lm.ERROR_LOG_WRITTEN); - - Assert.equal(countLogFiles(), 2, "expect 2 log files"); - await lm.removeAllLogs(); - Assert.equal( - countLogFiles(), - 0, - "should be no log files after removing them" - ); - - lm.finalize(); -}); diff --git a/services/common/tests/unit/xpcshell.toml b/services/common/tests/unit/xpcshell.toml index e4035f66b2..35c10dfce3 100644 --- a/services/common/tests/unit/xpcshell.toml +++ b/services/common/tests/unit/xpcshell.toml @@ -20,8 +20,6 @@ tags = "blocklist" ["test_load_modules.js"] -["test_logmanager.js"] - ["test_observers.js"] ["test_restrequest.js"] diff --git a/services/fxaccounts/FxAccountsCommands.sys.mjs b/services/fxaccounts/FxAccountsCommands.sys.mjs index 40fcc7f925..0851906061 100644 --- a/services/fxaccounts/FxAccountsCommands.sys.mjs +++ b/services/fxaccounts/FxAccountsCommands.sys.mjs @@ -5,10 +5,14 @@ import { COMMAND_SENDTAB, COMMAND_SENDTAB_TAIL, + COMMAND_CLOSETAB, + COMMAND_CLOSETAB_TAIL, SCOPE_OLD_SYNC, log, } from "resource://gre/modules/FxAccountsCommon.sys.mjs"; +import { clearTimeout, setTimeout } from "resource://gre/modules/Timer.sys.mjs"; + import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs"; import { Observers } from "resource://services-common/observers.sys.mjs"; @@ -36,18 +40,32 @@ export class FxAccountsCommands { constructor(fxAccountsInternal) { this._fxai = fxAccountsInternal; this.sendTab = new SendTab(this, fxAccountsInternal); + this.closeTab = new CloseRemoteTab(this, fxAccountsInternal); this._invokeRateLimitExpiry = 0; } async availableCommands() { + // Invalid keys usually means the account is not verified yet. const encryptedSendTabKeys = await this.sendTab.getEncryptedSendTabKeys(); - if (!encryptedSendTabKeys) { - // This will happen if the account is not verified yet. - return {}; + let commands = {}; + + if (encryptedSendTabKeys) { + commands[COMMAND_SENDTAB] = encryptedSendTabKeys; } - return { - [COMMAND_SENDTAB]: encryptedSendTabKeys, - }; + + // Close Tab is still a worked-on feature, so we should not broadcast it widely yet + let closeTabEnabled = Services.prefs.getBoolPref( + "identity.fxaccounts.commands.remoteTabManagement.enabled", + false + ); + if (closeTabEnabled) { + const encryptedCloseTabKeys = + await this.closeTab.getEncryptedCloseTabKeys(); + if (encryptedCloseTabKeys) { + commands[COMMAND_CLOSETAB] = encryptedCloseTabKeys; + } + } + return commands; } async invoke(command, device, payload) { @@ -166,6 +184,7 @@ export class FxAccountsCommands { } // We debounce multiple incoming tabs so we show a single notification. 
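
The batched close-tab commands end up being surfaced through a single observer notification (see _notifyFxATabsClosed below). A sketch of a hypothetical consumer of that notification, using the services-common Observers helper this module already imports; the callback name is made up for illustration.

import { Observers } from "resource://services-common/observers.sys.mjs";

// Each entry is { urls, sender }, as pushed onto tabsToClose below.
function onRemoteTabsClosed(tabsToClose) {
  for (const { urls, sender } of tabsToClose) {
    console.log(
      `${sender ? sender.name : "Unknown device"} asked to close ${urls.length} tab(s)`
    );
  }
}
Observers.add("fxaccounts:commands:close-uri", onRemoteTabsClosed);
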
const tabsReceived = []; + const tabsToClose = []; for (const { index, data } of messages) { const { command, payload, sender: senderId } = data; const reason = this._getReason(notifiedIndex, index); @@ -179,6 +198,24 @@ export class FxAccountsCommands { ); } switch (command) { + case COMMAND_CLOSETAB: + try { + const { urls } = await this.closeTab.handleTabClose( + senderId, + payload, + reason + ); + log.info( + `Close Tab received with FxA commands: "${urls.length} tabs" + from ${sender ? sender.name : "Unknown device"}.` + ); + // URLs are PII, so only logged at trace. + log.trace(`Close Remote Tabs received URLs: ${urls}`); + tabsToClose.push({ urls, sender }); + } catch (e) { + log.error(`Error while handling incoming Close Tab payload.`, e); + } + break; case COMMAND_SENDTAB: try { const { title, uri } = await this.sendTab.handle( @@ -212,11 +249,18 @@ export class FxAccountsCommands { if (tabsReceived.length) { this._notifyFxATabsReceived(tabsReceived); } + if (tabsToClose.length) { + this._notifyFxATabsClosed(tabsToClose); + } } _notifyFxATabsReceived(tabsReceived) { Observers.notify("fxaccounts:commands:open-uri", tabsReceived); } + + _notifyFxATabsClosed(tabsToClose) { + Observers.notify("fxaccounts:commands:close-uri", tabsToClose); + } } /** @@ -458,6 +502,286 @@ export class SendTab { } } +/** + * Close Tabs is built on-top of device commands and handles + * actions a client wants to perform on tabs found on other devices + * This class is very similar to the Send Tab component in FxAccountsCommands + * + * Devices exchange keys wrapped in the oldsync key between themselves (getEncryptedCloseTabKeys) + * during the device registration flow. The FxA server can theoretically never + * retrieve the close tab keys since it doesn't know the oldsync key. + * + * Note: Close Tabs does things slightly different from SendTab + * The sender encrypts the close-tab command using the receiver's public key, + * and the FxA server stores it (without re-encrypting). + * A web-push notifies the receiver that a new command is available. + * The receiver decrypts the payload using its private key. + */ +export class CloseRemoteTab { + constructor(commands, fxAccountsInternal) { + this._commands = commands; + this._fxai = fxAccountsInternal; + this.pendingClosedTabs = new Map(); + // pushes happen per device, making a timer per device makes sending + // the pushes a little more sane + this.pushTimers = new Map(); + } + + /** + * Sending a push everytime the user wants to close a tab on a remote device + * could lead to excessive notifications to the users device, push throttling, etc + * so we add the tabs to a queue and have a timer that sends the push after a certain + * amount of "inactivity" + */ + /** + * @param {Device} targetDevice - Device object (typically returned by fxAccounts.getDevicesList()). 
+ * @param {String} tab - url for the tab to close + * @param {Integer} how far to delay, in miliseconds, the push for this timer + */ + enqueueTabToClose(targetDevice, tab, pushDelay = 6000) { + if (this.pendingClosedTabs.has(targetDevice.id)) { + this.pendingClosedTabs.get(targetDevice.id).tabs.push(tab); + } else { + this.pendingClosedTabs.set(targetDevice.id, { + device: targetDevice, + tabs: [tab], + }); + } + + // extend the timer + this._refreshPushTimer(targetDevice.id, pushDelay); + } + + async _refreshPushTimer(deviceId, pushDelay) { + // If the user is still performing "actions" for this device + // reset the timer to send the push + if (this.pushTimers.has(deviceId)) { + clearTimeout(this.pushTimers.get(deviceId)); + } + + // There is a possibility that the browser closes before this actually executes + // we should catch the browser as it's closing and immediately fire these + // See https://bugzilla.mozilla.org/show_bug.cgi?id=1888299 + const timerId = setTimeout(async () => { + let { device, tabs } = this.pendingClosedTabs.get(deviceId); + // send a push notification for this specific device + await this._sendCloseTabPush(device, tabs); + + // Clear the timer + this.pushTimers.delete(deviceId); + // We also need to locally store the tabs we sent so the user doesn't + // see these anymore + this.pendingClosedTabs.delete(deviceId); + + // This is used for tests only, to get around timer woes + Observers.notify("test:fxaccounts:commands:close-uri:sent"); + }, pushDelay); + + // Store the new timer with the device + this.pushTimers.set(deviceId, timerId); + } + + /** + * @param {Device} target - Device object (typically returned by fxAccounts.getDevicesList()). + * @param {String[]} urls - array of urls that should be closed on the remote device + */ + async _sendCloseTabPush(target, urls) { + log.info(`Sending tab closures to ${target.id} device.`); + const flowID = this._fxai.telemetry.generateFlowID(); + const encoder = new TextEncoder(); + try { + const streamID = this._fxai.telemetry.generateFlowID(); + const targetData = { flowID, streamID, urls }; + const bytes = encoder.encode(JSON.stringify(targetData)); + const encrypted = await this._encrypt(bytes, target); + // FxA expects an object as the payload, but we only have a single encrypted string; wrap it. + // If you add any plaintext items to this payload, please carefully consider the privacy implications + // of revealing that data to the FxA server. + const payload = { encrypted }; + await this._commands.invoke(COMMAND_CLOSETAB, target, payload); + this._fxai.telemetry.recordEvent( + "command-sent", + COMMAND_CLOSETAB_TAIL, + this._fxai.telemetry.sanitizeDeviceId(target.id), + { flowID, streamID } + ); + } catch (error) { + // We should also show the user there was some kind've error + log.error("Error while invoking a send tab command.", error); + } + } + + // Returns true if the target device is compatible with FxA Commands Send tab. + isDeviceCompatible(device) { + let pref = Services.prefs.getBoolPref( + "identity.fxaccounts.commands.remoteTabManagement.enabled", + false + ); + return ( + pref && + device.availableCommands && + device.availableCommands[COMMAND_CLOSETAB] + ); + } + + // Handle incoming remote tab payload, called by FxAccountsCommands. 
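
Before the receive path below, a sketch of the send path as exercised by the new tests: check isDeviceCompatible() (which also requires the remoteTabManagement pref), then queue the tab; the debounced push later invokes COMMAND_CLOSETAB with a payload of the form { encrypted }, whose encrypted body is JSON like { flowID, streamID, urls }. The fxAccounts.commands.closeTab handle and the device-list call are assumptions based on the constructor above and the JSDoc's fxAccounts.getDevicesList() reference, not confirmed API.

// Hypothetical send-side usage (names per the assumptions above).
const devices = await fxAccounts.getDevicesList();
for (const device of devices) {
  if (fxAccounts.commands.closeTab.isDeviceCompatible(device)) {
    // Queued per device; a single push is sent after the debounce delay.
    fxAccounts.commands.closeTab.enqueueTabToClose(device, { url: "https://example.com/" });
  }
}
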
+ async handleTabClose(senderID, { encrypted }, reason) { + const bytes = await this._decrypt(encrypted); + const decoder = new TextDecoder("utf8"); + const data = JSON.parse(decoder.decode(bytes)); + // urls is an array of strings + const { flowID, streamID, urls } = data; + this._fxai.telemetry.recordEvent( + "command-received", + COMMAND_CLOSETAB_TAIL, + this._fxai.telemetry.sanitizeDeviceId(senderID), + { flowID, streamID, reason } + ); + + return { + urls, + }; + } + + async _encrypt(bytes, device) { + let bundle = device.availableCommands[COMMAND_CLOSETAB]; + if (!bundle) { + throw new Error(`Device ${device.id} does not have close tab keys.`); + } + const oldsyncKey = await this._fxai.keys.getKeyForScope(SCOPE_OLD_SYNC); + const json = JSON.parse(bundle); + const wrapper = new lazy.CryptoWrapper(); + wrapper.deserialize({ payload: json }); + const syncKeyBundle = lazy.BulkKeyBundle.fromJWK(oldsyncKey); + let { publicKey, authSecret } = await wrapper.decrypt(syncKeyBundle); + authSecret = urlsafeBase64Decode(authSecret); + publicKey = urlsafeBase64Decode(publicKey); + + const { ciphertext: encrypted } = await lazy.PushCrypto.encrypt( + bytes, + publicKey, + authSecret + ); + return urlsafeBase64Encode(encrypted); + } + + async _getPersistedCloseTabKeys() { + const { device } = await this._fxai.getUserAccountData(["device"]); + return device && device.closeTabKeys; + } + + async _decrypt(ciphertext) { + let { privateKey, publicKey, authSecret } = + await this._getPersistedCloseTabKeys(); + publicKey = urlsafeBase64Decode(publicKey); + authSecret = urlsafeBase64Decode(authSecret); + ciphertext = new Uint8Array(urlsafeBase64Decode(ciphertext)); + return lazy.PushCrypto.decrypt( + privateKey, + publicKey, + authSecret, + // The only Push encoding we support. + { encoding: "aes128gcm" }, + ciphertext + ); + } + + async _generateAndPersistCloseTabKeys() { + let [publicKey, privateKey] = await lazy.PushCrypto.generateKeys(); + publicKey = urlsafeBase64Encode(publicKey); + let authSecret = lazy.PushCrypto.generateAuthenticationSecret(); + authSecret = urlsafeBase64Encode(authSecret); + const closeTabKeys = { + publicKey, + privateKey, + authSecret, + }; + await this._fxai.withCurrentAccountState(async state => { + const { device } = await state.getUserAccountData(["device"]); + await state.updateUserAccountData({ + device: { + ...device, + closeTabKeys, + }, + }); + }); + return closeTabKeys; + } + + async _getPersistedEncryptedCloseTabKey() { + const { encryptedCloseTabKeys } = await this._fxai.getUserAccountData([ + "encryptedCloseTabKeys", + ]); + return encryptedCloseTabKeys; + } + + async _generateAndPersistEncryptedCloseTabKeys() { + let closeTabKeys = await this._getPersistedCloseTabKeys(); + if (!closeTabKeys) { + log.info("Could not find closeTab keys, generating them"); + closeTabKeys = await this._generateAndPersistCloseTabKeys(); + } + // Strip the private key from the bundle to encrypt. 
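
At this step the locally persisted bundle still holds the private key; only the public key and auth secret are wrapped with the oldsync key for the device record. A sketch of the two shapes involved, with placeholder values rather than real key material:

// Persisted locally via _generateAndPersistCloseTabKeys(), never uploaded as-is:
const closeTabKeysShape = {
  publicKey: "BASE64URL_P256_PUBLIC_KEY",   // urlsafeBase64Encode()d raw key
  privateKey: "AS_RETURNED_BY_PushCrypto.generateKeys", // stored unmodified, kept local
  authSecret: "BASE64URL_AUTH_SECRET",
};
// What actually gets encrypted into encryptedCloseTabKeys:
const keyToEncryptShape = {
  publicKey: closeTabKeysShape.publicKey,
  authSecret: closeTabKeysShape.authSecret,
};
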
+ const keyToEncrypt = { + publicKey: closeTabKeys.publicKey, + authSecret: closeTabKeys.authSecret, + }; + if (!(await this._fxai.keys.canGetKeyForScope(SCOPE_OLD_SYNC))) { + log.info("Can't fetch keys, so unable to determine closeTab keys"); + return null; + } + let oldsyncKey; + try { + oldsyncKey = await this._fxai.keys.getKeyForScope(SCOPE_OLD_SYNC); + } catch (ex) { + log.warn( + "Failed to fetch keys, so unable to determine closeTab keys", + ex + ); + return null; + } + const wrapper = new lazy.CryptoWrapper(); + wrapper.cleartext = keyToEncrypt; + const keyBundle = lazy.BulkKeyBundle.fromJWK(oldsyncKey); + await wrapper.encrypt(keyBundle); + const encryptedCloseTabKeys = JSON.stringify({ + // This is expected in hex, due to pre-JWK sync key ids :-( + kid: this._fxai.keys.kidAsHex(oldsyncKey), + IV: wrapper.IV, + hmac: wrapper.hmac, + ciphertext: wrapper.ciphertext, + }); + await this._fxai.withCurrentAccountState(async state => { + await state.updateUserAccountData({ + encryptedCloseTabKeys, + }); + }); + return encryptedCloseTabKeys; + } + + async getEncryptedCloseTabKeys() { + let encryptedCloseTabKeys = await this._getPersistedEncryptedCloseTabKey(); + const closeTabKeys = await this._getPersistedCloseTabKeys(); + if (!encryptedCloseTabKeys || !closeTabKeys) { + log.info("Generating and persisting encrypted closeTab keys"); + // `_generateAndPersistEncryptedCloseTabKeys` requires the sync key + // which cannot be accessed if the login manager is locked + // (i.e when the primary password is locked) or if the sync keys + // aren't accessible (account isn't verified) + // so this function could fail to retrieve the keys + // however, device registration will trigger when the account + // is verified, so it's OK + // Note that it's okay to persist those keys, because they are + // already persisted in plaintext and the encrypted bundle + // does not include the sync-key (the sync key is used to encrypt + // it though) + encryptedCloseTabKeys = + await this._generateAndPersistEncryptedCloseTabKeys(); + } + return encryptedCloseTabKeys; + } +} + function urlsafeBase64Encode(buffer) { return ChromeUtils.base64URLEncode(new Uint8Array(buffer), { pad: false }); } diff --git a/services/fxaccounts/FxAccountsCommon.sys.mjs b/services/fxaccounts/FxAccountsCommon.sys.mjs index 2688fc3c0a..18f129af38 100644 --- a/services/fxaccounts/FxAccountsCommon.sys.mjs +++ b/services/fxaccounts/FxAccountsCommon.sys.mjs @@ -3,7 +3,7 @@ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ import { Log } from "resource://gre/modules/Log.sys.mjs"; -import { LogManager } from "resource://services-common/logmanager.sys.mjs"; +import { LogManager } from "resource://gre/modules/LogManager.sys.mjs"; // loglevel should be one of "Fatal", "Error", "Warn", "Info", "Config", // "Debug", "Trace" or "All". If none is specified, "Debug" will be used by @@ -29,7 +29,13 @@ let logs = [ ]; // For legacy reasons, the log manager still thinks it's part of sync. -export let logManager = new LogManager("services.sync.", logs, "sync"); +export let logManager = new LogManager({ + prefRoot: "services.sync.", + logNames: logs, + logFilePrefix: "sync", + logFileSubDirectoryEntries: ["weave", "logs"], + testTopicPrefix: "services-tests:common:log-manager:", +}); // A boolean to indicate if personally identifiable information (or anything // else sensitive, such as credentials) should be logged. 
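
The LogManager constructor changes here from positional arguments to a single options object: the removed services-common module took (prefRoot, logNames, logFilePrefix), while the relocated resource://gre/modules/LogManager.sys.mjs is called with named options as shown above. A sketch of the migration for another hypothetical consumer; the pref branch, log names and subdirectory are placeholders, only the options exercised by this call site are shown (the sync call site additionally passes testTopicPrefix for its tests), and whether any of them are optional is not visible in this diff.

// Before (resource://services-common/logmanager.sys.mjs, removed above):
//   new LogManager("my-feature.", ["MyFeatureLog"], "my-feature");
// After (resource://gre/modules/LogManager.sys.mjs):
import { LogManager } from "resource://gre/modules/LogManager.sys.mjs";

const logManager = new LogManager({
  prefRoot: "my-feature.",
  logNames: ["MyFeatureLog"],
  logFilePrefix: "my-feature",
  logFileSubDirectoryEntries: ["my-feature", "logs"],
});
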
@@ -77,6 +83,9 @@ export let COMMAND_PREFIX = "https://identity.mozilla.com/cmd/"; // The commands we support - only the _TAIL values are recorded in telemetry. export let COMMAND_SENDTAB_TAIL = "open-uri"; export let COMMAND_SENDTAB = COMMAND_PREFIX + COMMAND_SENDTAB_TAIL; +// A command to close a tab on this device +export let COMMAND_CLOSETAB_TAIL = "close-uri/v1"; +export let COMMAND_CLOSETAB = COMMAND_PREFIX + COMMAND_CLOSETAB_TAIL; // OAuth export let FX_OAUTH_CLIENT_ID = "5882386c6d801776"; @@ -266,6 +275,7 @@ export let FXA_PWDMGR_PLAINTEXT_FIELDS = new Set([ "device", "profileCache", "encryptedSendTabKeys", + "encryptedCloseTabKeys", ]); // Fields we store in secure storage if it exists. diff --git a/services/fxaccounts/FxAccountsKeys.sys.mjs b/services/fxaccounts/FxAccountsKeys.sys.mjs index 9717f010c7..1258bdcd05 100644 --- a/services/fxaccounts/FxAccountsKeys.sys.mjs +++ b/services/fxaccounts/FxAccountsKeys.sys.mjs @@ -157,7 +157,9 @@ export class FxAccountsKeys { if (!kid.includes("-")) { return false; } - const [keyRotationTimestamp, fingerprint] = kid.split("-"); + const dashIndex = kid.indexOf("-"); + const keyRotationTimestamp = kid.substring(0, dashIndex); + const fingerprint = kid.substring(dashIndex + 1); // We then verify that the timestamp is a valid timestamp const keyRotationTimestampNum = Number(keyRotationTimestamp); // If the value we got back is falsy it's not a valid timestamp diff --git a/services/fxaccounts/FxAccountsProfileClient.sys.mjs b/services/fxaccounts/FxAccountsProfileClient.sys.mjs index 7ae1bd95db..14a773c999 100644 --- a/services/fxaccounts/FxAccountsProfileClient.sys.mjs +++ b/services/fxaccounts/FxAccountsProfileClient.sys.mjs @@ -5,7 +5,6 @@ /** * A client to fetch profile information for a Firefox Account. */ -"use strict;"; import { ERRNO_NETWORK, diff --git a/services/fxaccounts/tests/xpcshell/test_commands_closetab.js b/services/fxaccounts/tests/xpcshell/test_commands_closetab.js new file mode 100644 index 0000000000..447b80be94 --- /dev/null +++ b/services/fxaccounts/tests/xpcshell/test_commands_closetab.js @@ -0,0 +1,263 @@ +/* Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/publicdomain/zero/1.0/ */ + +"use strict"; + +const { CloseRemoteTab } = ChromeUtils.importESModule( + "resource://gre/modules/FxAccountsCommands.sys.mjs" +); + +const { COMMAND_CLOSETAB, COMMAND_CLOSETAB_TAIL } = ChromeUtils.importESModule( + "resource://gre/modules/FxAccountsCommon.sys.mjs" +); + +class TelemetryMock { + constructor() { + this._events = []; + this._uuid_counter = 0; + } + + recordEvent(object, method, value, extra = undefined) { + this._events.push({ object, method, value, extra }); + } + + generateFlowID() { + this._uuid_counter += 1; + return this._uuid_counter.toString(); + } + + sanitizeDeviceId(id) { + return id + "-san"; + } +} + +function FxaInternalMock() { + return { + telemetry: new TelemetryMock(), + }; +} + +function promiseObserver(topic) { + return new Promise(resolve => { + let obs = (aSubject, aTopic) => { + Services.obs.removeObserver(obs, aTopic); + resolve(aSubject); + }; + Services.obs.addObserver(obs, topic); + }); +} + +add_task(async function test_closetab_isDeviceCompatible() { + const closeTab = new CloseRemoteTab(null, null); + let device = { name: "My device" }; + Assert.ok(!closeTab.isDeviceCompatible(device)); + device = { name: "My device", availableCommands: {} }; + Assert.ok(!closeTab.isDeviceCompatible(device)); + device = { + name: "My device", + availableCommands: { + "https://identity.mozilla.com/cmd/close-uri/v1": "payload", + }, + }; + // Even though the command is available, we're keeping this feature behind a feature + // flag for now, so it should still show up as "not available" + Assert.ok(!closeTab.isDeviceCompatible(device)); + + // Enable the feature + Services.prefs.setBoolPref( + "identity.fxaccounts.commands.remoteTabManagement.enabled", + true + ); + Assert.ok(closeTab.isDeviceCompatible(device)); + + // clear it for the next test + Services.prefs.clearUserPref( + "identity.fxaccounts.commands.remoteTabManagement.enabled" + ); +}); + +add_task(async function test_closetab_send() { + const commands = { + invoke: sinon.spy((cmd, device, payload) => { + Assert.equal(payload.encrypted, "encryptedpayload"); + }), + }; + const fxai = FxaInternalMock(); + const closeTab = new CloseRemoteTab(commands, fxai); + closeTab._encrypt = async () => { + return "encryptedpayload"; + }; + const targetDevice = { id: "dev1", name: "Device 1" }; + const tab = { url: "https://foo.bar/" }; + + // We add a 0 delay so we can "send" the push immediately + closeTab.enqueueTabToClose(targetDevice, tab, 0); + + // We have a tab queued + Assert.equal(closeTab.pendingClosedTabs.get(targetDevice.id).tabs.length, 1); + + // Wait on the notification to ensure the push sent + await promiseObserver("test:fxaccounts:commands:close-uri:sent"); + + // The push has been sent, we should not have the tabs anymore + Assert.equal( + closeTab.pendingClosedTabs.has(targetDevice.id), + false, + "The device should be removed from the queue after sending." 
+ ); + + // Telemetry shows we sent one successfully + Assert.deepEqual(fxai.telemetry._events, [ + { + object: "command-sent", + method: COMMAND_CLOSETAB_TAIL, + value: "dev1-san", + // streamID uses the same generator as flowId, so it will be 2 + extra: { flowID: "1", streamID: "2" }, + }, + ]); +}); + +add_task(async function test_multiple_tabs_one_device() { + const commands = sinon.stub({ + invoke: async () => {}, + }); + const fxai = FxaInternalMock(); + const closeTab = new CloseRemoteTab(commands, fxai); + closeTab._encrypt = async () => "encryptedpayload"; + + const targetDevice = { + id: "dev1", + name: "Device 1", + availableCommands: { [COMMAND_CLOSETAB]: "payload" }, + }; + const tab1 = { url: "https://foo.bar/" }; + const tab2 = { url: "https://example.com/" }; + + closeTab.enqueueTabToClose(targetDevice, tab1, 1000); + closeTab.enqueueTabToClose(targetDevice, tab2, 0); + + // We have two tabs queued + Assert.equal(closeTab.pendingClosedTabs.get("dev1").tabs.length, 2); + + // Wait on the notification to ensure the push sent + await promiseObserver("test:fxaccounts:commands:close-uri:sent"); + + Assert.equal( + closeTab.pendingClosedTabs.has(targetDevice.id), + false, + "The device should be removed from the queue after sending." + ); + + // Telemetry shows we sent one successfully + Assert.deepEqual(fxai.telemetry._events, [ + { + object: "command-sent", + method: COMMAND_CLOSETAB_TAIL, + value: "dev1-san", + extra: { flowID: "1", streamID: "2" }, + }, + ]); +}); + +add_task(async function test_timer_reset_on_new_tab() { + const commands = sinon.stub({ + invoke: async () => {}, + }); + const fxai = FxaInternalMock(); + const closeTab = new CloseRemoteTab(commands, fxai); + closeTab._encrypt = async () => "encryptedpayload"; + + const targetDevice = { + id: "dev1", + name: "Device 1", + availableCommands: { [COMMAND_CLOSETAB]: "payload" }, + }; + const tab1 = { url: "https://foo.bar/" }; + const tab2 = { url: "https://example.com/" }; + + // default wait is 6s + closeTab.enqueueTabToClose(targetDevice, tab1); + + Assert.equal(closeTab.pendingClosedTabs.get(targetDevice.id).tabs.length, 1); + + // Adds a new tab and should reset timer + closeTab.enqueueTabToClose(targetDevice, tab2, 100); + + // We have two tabs queued + Assert.equal(closeTab.pendingClosedTabs.get(targetDevice.id).tabs.length, 2); + + // Wait on the notification to ensure the push sent + await promiseObserver("test:fxaccounts:commands:close-uri:sent"); + + // We only sent one push + sinon.assert.calledOnce(commands.invoke); + Assert.equal(closeTab.pendingClosedTabs.has(targetDevice.id), false); + + // Telemetry shows we sent only one + Assert.deepEqual(fxai.telemetry._events, [ + { + object: "command-sent", + method: COMMAND_CLOSETAB_TAIL, + value: "dev1-san", + extra: { flowID: "1", streamID: "2" }, + }, + ]); +}); + +add_task(async function test_multiple_devices() { + const commands = sinon.stub({ + invoke: async () => {}, + }); + const fxai = FxaInternalMock(); + const closeTab = new CloseRemoteTab(commands, fxai); + closeTab._encrypt = async () => "encryptedpayload"; + + const device1 = { + id: "dev1", + name: "Device 1", + availableCommands: { [COMMAND_CLOSETAB]: "payload" }, + }; + const device2 = { + id: "dev2", + name: "Device 2", + availableCommands: { [COMMAND_CLOSETAB]: "payload" }, + }; + const tab1 = { url: "https://foo.bar/" }; + const tab2 = { url: "https://example.com/" }; + + closeTab.enqueueTabToClose(device1, tab1, 100); + closeTab.enqueueTabToClose(device2, tab2, 200); + + 
Assert.equal(closeTab.pendingClosedTabs.get(device1.id).tabs.length, 1); + Assert.equal(closeTab.pendingClosedTabs.get(device2.id).tabs.length, 1); + + // observe the notification to ensure the push sent + await promiseObserver("test:fxaccounts:commands:close-uri:sent"); + + // We should have only sent the first device + sinon.assert.calledOnce(commands.invoke); + Assert.equal(closeTab.pendingClosedTabs.has(device1.id), false); + + // Wait on the notification to ensure the push sent + await promiseObserver("test:fxaccounts:commands:close-uri:sent"); + + // Now we've sent both pushes + sinon.assert.calledTwice(commands.invoke); + + // Two telemetry events to two different devices + Assert.deepEqual(fxai.telemetry._events, [ + { + object: "command-sent", + method: COMMAND_CLOSETAB_TAIL, + value: "dev1-san", + extra: { flowID: "1", streamID: "2" }, + }, + { + object: "command-sent", + method: COMMAND_CLOSETAB_TAIL, + value: "dev2-san", + extra: { flowID: "3", streamID: "4" }, + }, + ]); +}); diff --git a/services/fxaccounts/tests/xpcshell/test_keys.js b/services/fxaccounts/tests/xpcshell/test_keys.js index 9a25ca90f3..5caa4679c6 100644 --- a/services/fxaccounts/tests/xpcshell/test_keys.js +++ b/services/fxaccounts/tests/xpcshell/test_keys.js @@ -219,6 +219,18 @@ add_task(function test_check_valid_scoped_keys() { }; Assert.equal(keys.validScopedKeys(scopedKeys), true); }); + add_task(function test_valid_kid_with_dash() { + const scopedKeys = { + "https://identity.mozilla.com/apps/oldsync": { + kty: "oct", + // kid contains another dash. The fingerprint must not be truncated. + kid: "1510726318123-I-Qv4onc7VcVE1kTQkyyOw", + k: "DW_ll5GwX6SJ5GPqJVAuMUP2t6kDqhUulc2cbt26xbTcaKGQl-9l29FHAQ7kUiJETma4s9fIpEHrt909zgFang", + scope: "https://identity.mozilla.com/apps/oldsync", + }, + }; + Assert.equal(keys.validScopedKeys(scopedKeys), true); + }); }); add_task(async function test_rejects_bad_scoped_key_data() { diff --git a/services/fxaccounts/tests/xpcshell/xpcshell.toml b/services/fxaccounts/tests/xpcshell/xpcshell.toml index 7fc9c60006..09469fc0b4 100644 --- a/services/fxaccounts/tests/xpcshell/xpcshell.toml +++ b/services/fxaccounts/tests/xpcshell/xpcshell.toml @@ -20,6 +20,8 @@ support-files = [ ["test_commands.js"] +["test_commands_closetab.js"] + ["test_credentials.js"] ["test_device.js"] diff --git a/services/settings/Attachments.sys.mjs b/services/settings/Attachments.sys.mjs index 345bf1e8e6..7724afd5b8 100644 --- a/services/settings/Attachments.sys.mjs +++ b/services/settings/Attachments.sys.mjs @@ -10,6 +10,8 @@ ChromeUtils.defineESModuleGetters(lazy, { Utils: "resource://services-settings/Utils.sys.mjs", }); +ChromeUtils.defineLazyGetter(lazy, "console", () => lazy.Utils.log); + class DownloadError extends Error { constructor(url, resp) { super(`Could not download ${url}`); @@ -212,6 +214,15 @@ export class Downloader { ); } + if (!lazy.Utils.LOAD_DUMPS) { + if (fallbackToDump) { + lazy.console.warn( + "#fetchAttachment: Forcing fallbackToDump to false due to Utils.LOAD_DUMPS being false" + ); + } + fallbackToDump = false; + } + const dumpInfo = new LazyRecordAndBuffer(() => this._readAttachmentDump(attachmentId) ); diff --git a/services/settings/RemoteSettingsClient.sys.mjs b/services/settings/RemoteSettingsClient.sys.mjs index c521b72123..2153520adb 100644 --- a/services/settings/RemoteSettingsClient.sys.mjs +++ b/services/settings/RemoteSettingsClient.sys.mjs @@ -469,9 +469,10 @@ export class RemoteSettingsClient extends EventEmitter { } else { lazy.console.debug(`${this.identifier} 
Awaiting existing import.`); } - } else if (hasLocalData && loadDumpIfNewer) { + } else if (hasLocalData && loadDumpIfNewer && lazy.Utils.LOAD_DUMPS) { // Check whether the local data is older than the packaged dump. - // If it is, load the packaged dump (which overwrites the local data). + // If it is and we are on production, load the packaged dump (which + // overwrites the local data). let lastModifiedDump = await lazy.Utils.getLocalDumpLastModified( this.bucketName, this.collectionName diff --git a/services/settings/dumps/blocklists/addons-bloomfilters.json b/services/settings/dumps/blocklists/addons-bloomfilters.json index 489a3dc429..6cdec41139 100644 --- a/services/settings/dumps/blocklists/addons-bloomfilters.json +++ b/services/settings/dumps/blocklists/addons-bloomfilters.json @@ -3,6 +3,35 @@ { "stash": { "blocked": [ + "{a5eff3e0-fd15-4905-8799-a3c8a3a1eb56}:1.6", + "{437c932f-fa9f-459d-8dab-ea6219d23513}:1.7", + "{2aeb7e23-47e4-4a41-9125-f5a28e9b62cf}:1.4" + ], + "unblocked": [] + }, + "schema": 1717054814742, + "key_format": "{guid}:{version}", + "stash_time": 1717094105559, + "id": "410c26fe-3975-49d7-9926-511f891fcfcf", + "last_modified": 1717094162111 + }, + { + "stash": { + "blocked": [ + "superiorblock@ext:1.0", + "charityaff@testnet:2024.1" + ], + "unblocked": [] + }, + "schema": 1716455304678, + "key_format": "{guid}:{version}", + "stash_time": 1716834906988, + "id": "dcd0b6d7-76ec-4c93-a447-c90fb79a8376", + "last_modified": 1716834963648 + }, + { + "stash": { + "blocked": [ "hiddenads@addon:1.2", "hiddenads@addon:1.1" ], @@ -678,5 +707,5 @@ "last_modified": 1707395854769 } ], - "timestamp": 1715085361954 + "timestamp": 1717094162111 } diff --git a/services/settings/dumps/main/cookie-banner-rules-list.json b/services/settings/dumps/main/cookie-banner-rules-list.json index e1757708ea..440c43ec92 100644 --- a/services/settings/dumps/main/cookie-banner-rules-list.json +++ b/services/settings/dumps/main/cookie-banner-rules-list.json @@ -2,6 +2,96 @@ "data": [ { "click": { + "optIn": "custom-button#consentAccept", + "presence": "main[data-consent-main]" + }, + "schema": 1717015159283, + "domains": [ + "rp-online.de" + ], + "id": "06ae9b8e-909a-4f50-be66-0046a1a75ddf", + "last_modified": 1717071078321 + }, + { + "click": { + "optIn": ".cookie-consent-banner__btn-primary", + "presence": "#cookie-consent-banner" + }, + "schema": 1717015159283, + "cookies": { + "optOut": [ + { + "name": "__tnw_cookieConsent", + "value": "{%22ad_storage%22:%22denied%22%2C%22analytics_storage%22:%22denied%22}" + } + ] + }, + "domains": [ + "thenextweb.com" + ], + "id": "02c3c5e1-03a6-426a-b00b-fa34f62322fd", + "last_modified": 1717071078318 + }, + { + "click": { + "optIn": "div[data-tracking-opt-in-accept=\"true\"]", + "optOut": "div[data-tracking-opt-in-reject=\"true\"]", + "presence": "div[data-tracking-opt-in-overlay=\"true\"]" + }, + "schema": 1717015159283, + "domains": [ + "fandom.com" + ], + "id": "D168AF87-F481-4AD7-BE78-28A59F798406", + "last_modified": 1717071078313 + }, + { + "schema": 1717015159283, + "cookies": { + "optIn": [ + { + "name": "d_prefs", + "value": "MToxLGNvbnNlbnRfdmVyc2lvbjoyLHRleHRfdmVyc2lvbjoxMDAw" + } + ], + "optOut": [ + { + "name": "d_prefs", + "value": "MjoxLGNvbnNlbnRfdmVyc2lvbjoyLHRleHRfdmVyc2lvbjoxMDAw" + }, + { + "name": "twtr_pixel_opt_in", + "value": "N" + } + ] + }, + "domains": [ + "twitter.com", + "x.com" + ], + "id": "05b3b417-c4c7-4ed0-a3cf-43053e8b33ab", + "last_modified": 1717071078310 + }, + { + "click": { + "optIn": 
"[data-cookiebanner=\"accept_button\"]", + "optOut": "[data-cookiebanner=\"accept_only_essential_button\"]", + "presence": "[data-testid=\"cookie-policy-manage-dialog\"]" + }, + "schema": 1717015159283, + "domains": [ + "facebook.com", + "instagram.com", + "messenger.com", + "meta.com", + "oculus.com", + "workplace.com" + ], + "id": "d1d8ba36-ced7-4453-8b17-2e051e0ab1eb", + "last_modified": 1717071078306 + }, + { + "click": { "optIn": "#shopify-pc__banner__btn-accept", "optOut": "#shopify-pc__banner__btn-decline", "presence": "#shopify-pc__banner" @@ -1368,32 +1458,6 @@ "last_modified": 1714811639827 }, { - "schema": 1714780808679, - "cookies": { - "optIn": [ - { - "name": "d_prefs", - "value": "MToxLGNvbnNlbnRfdmVyc2lvbjoyLHRleHRfdmVyc2lvbjoxMDAw" - } - ], - "optOut": [ - { - "name": "d_prefs", - "value": "MjoxLGNvbnNlbnRfdmVyc2lvbjoyLHRleHRfdmVyc2lvbjoxMDAw" - }, - { - "name": "twtr_pixel_opt_in", - "value": "N" - } - ] - }, - "domains": [ - "twitter.com" - ], - "id": "05b3b417-c4c7-4ed0-a3cf-43053e8b33ab", - "last_modified": 1714811639824 - }, - { "click": { "optIn": "button#CybotCookiebotDialogBodyLevelButtonLevelOptinAllowAll", "optOut": "button#CybotCookiebotDialogBodyButtonDecline", @@ -2182,22 +2246,6 @@ }, { "click": { - "optIn": "[data-cookiebanner=\"accept_button\"]", - "optOut": "[data-cookiebanner=\"accept_only_essential_button\"]", - "presence": "[data-testid=\"cookie-policy-manage-dialog\"]" - }, - "schema": 1710174339269, - "domains": [ - "facebook.com", - "messenger.com", - "oculus.com", - "workplace.com" - ], - "id": "d1d8ba36-ced7-4453-8b17-2e051e0ab1eb", - "last_modified": 1710331175381 - }, - { - "click": { "optIn": "#cookiescript_accept", "optOut": "#cookiescript_reject", "presence": "#cookiescript_injected_wrapper" @@ -4212,18 +4260,6 @@ "last_modified": 1697710931635 }, { - "click": { - "optIn": "div[data-tracking-opt-in-accept=\"true\"]", - "presence": "div[data-tracking-opt-in-overlay=\"true\"]" - }, - "schema": 1697557061727, - "domains": [ - "fandom.com" - ], - "id": "D168AF87-F481-4AD7-BE78-28A59F798406", - "last_modified": 1697710931628 - }, - { "schema": 1697557061727, "cookies": { "optOut": [ @@ -10115,5 +10151,5 @@ "last_modified": 1670498155651 } ], - "timestamp": 1714811640008 + "timestamp": 1717071078321 } diff --git a/services/settings/dumps/main/devtools-compatibility-browsers.json b/services/settings/dumps/main/devtools-compatibility-browsers.json index 4f78ce96d0..0f422a8f37 100644 --- a/services/settings/dumps/main/devtools-compatibility-browsers.json +++ b/services/settings/dumps/main/devtools-compatibility-browsers.json @@ -1,6 +1,15 @@ { "data": [ { + "name": "Edge", + "schema": 1716941107417, + "status": "current", + "version": "125", + "browserid": "edge", + "id": "f1147d5f-d690-43d0-879d-117c6ca24a16", + "last_modified": 1716966331501 + }, + { "name": "Firefox", "schema": 1715731507143, "status": "planned", @@ -182,15 +191,6 @@ }, { "name": "Edge", - "schema": 1714638388703, - "status": "beta", - "version": "125", - "browserid": "edge", - "id": "f1147d5f-d690-43d0-879d-117c6ca24a16", - "last_modified": 1715003394958 - }, - { - "name": "Edge", "schema": 1714867506401, "status": "nightly", "version": "126", @@ -208,15 +208,6 @@ "last_modified": 1714638388565 }, { - "name": "Edge", - "schema": 1714521906580, - "status": "current", - "version": "124", - "browserid": "edge", - "id": "3837dc37-38b7-483b-82b3-c5593e7a4c91", - "last_modified": 1714638388557 - }, - { "name": "Opera", "schema": 1713917107636, "status": "current", @@ -289,5 +280,5 @@ 
"last_modified": 1665656484764 } ], - "timestamp": 1715839095932 + "timestamp": 1716966331504 } diff --git a/services/settings/dumps/main/search-config-v2.json b/services/settings/dumps/main/search-config-v2.json index ec2e2cd829..3848d688ae 100644 --- a/services/settings/dumps/main/search-config-v2.json +++ b/services/settings/dumps/main/search-config-v2.json @@ -2524,10 +2524,10 @@ } }, "id": "8111d157-e064-40fa-993d-e1d972534754", - "identifier": "mercadolibre", - "last_modified": 1702906502289, + "identifier": "mercadolibre-ar", + "last_modified": 1717416922456, "recordType": "engine", - "schema": 1702901827142, + "schema": 1717411267717, "variants": [ { "environment": { @@ -7219,10 +7219,10 @@ } }, "id": "7c9d2fc3-1e6b-40f6-80ad-080bd94fe24b", - "identifier": "wiktionary", - "last_modified": 1702906502251, + "identifier": "wiktionary-oc", + "last_modified": 1717416922458, "recordType": "engine", - "schema": 1702901835738, + "schema": 1717411323291, "variants": [ { "environment": { @@ -7420,5 +7420,5 @@ "schema": 1707824831520 } ], - "timestamp": 1715090108535 + "timestamp": 1717416922458 } diff --git a/services/settings/dumps/security-state/intermediates.json b/services/settings/dumps/security-state/intermediates.json index 35b0b0893d..49a76e2cec 100644 --- a/services/settings/dumps/security-state/intermediates.json +++ b/services/settings/dumps/security-state/intermediates.json @@ -1,6 +1,114 @@ { "data": [ { + "schema": 1717559643220, + "derHash": "zBufnkNw+2gUHSihFeqoY/jq23oE4r0js8YvnZ8XwmM=", + "subject": "CN=FIRMAPROFESIONAL ICA A01 QWAC 2022,O=Firmaprofesional SA,C=ES", + "subjectDN": "MHIxCzAJBgNVBAYTAkVTMRwwGgYDVQQKDBNGaXJtYXByb2Zlc2lvbmFsIFNBMRgwFgYDVQRhDA9WQVRFUy1BNjI2MzQwNjgxKzApBgNVBAMMIkZJUk1BUFJPRkVTSU9OQUwgSUNBIEEwMSBRV0FDIDIwMjI=", + "whitelist": false, + "attachment": { + "hash": "d736a3e7ddd478034b9ce53949685d8115e8a539876acfe978db47a7cda1e5c6", + "size": 1447, + "filename": "bVle95d2TFsMug2xpivw0fPUY7VsTluskvwQa1q1hms=.pem", + "location": "security-state-staging/intermediates/ef18664d-a3b4-4937-86fa-040d42090680.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "bVle95d2TFsMug2xpivw0fPUY7VsTluskvwQa1q1hms=", + "crlite_enrolled": false, + "id": "b693d627-7f28-4463-adb8-51bd4f3c2712", + "last_modified": 1717559823094 + }, + { + "schema": 1717559643616, + "derHash": "Iv1U+TOxf0WJQsNF465iXkBc5AsZGzFriHyj0CzKw7E=", + "subject": "CN=FIRMAPROFESIONAL ICA A02 NO QWAC 2022,O=Firmaprofesional SA,C=ES", + "subjectDN": "MHUxCzAJBgNVBAYTAkVTMRwwGgYDVQQKDBNGaXJtYXByb2Zlc2lvbmFsIFNBMRgwFgYDVQRhDA9WQVRFUy1BNjI2MzQwNjgxLjAsBgNVBAMMJUZJUk1BUFJPRkVTSU9OQUwgSUNBIEEwMiBOTyBRV0FDIDIwMjI=", + "whitelist": false, + "attachment": { + "hash": "c1332c5b2b17a40d7e1c836a6cff328b9adb70daa8683b785dd00bc661c36f89", + "size": 1451, + "filename": "d96AQ4m8-YeSRHUqkGB8vTJPwsoHOcOVl8BS6_lfdXM=.pem", + "location": "security-state-staging/intermediates/01df6fff-e459-4353-a8c7-56a5333a7ebe.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "d96AQ4m8+YeSRHUqkGB8vTJPwsoHOcOVl8BS6/lfdXM=", + "crlite_enrolled": false, + "id": "27e327c4-059e-49c9-8697-35cb673ccf87", + "last_modified": 1717559823092 + }, + { + "schema": 1717192455323, + "derHash": "158fDIFBgEs50EslktV6/r50+UYGVK/0kUkNu3xaLXQ=", + "subject": "CN=XinChaCha Trust EV TLS G2 R34 CA,O=Beijing Xinchacha Credit Management Co.\\, Ltd.,C=CN", + "subjectDN": "MHAxCzAJBgNVBAYTAkNOMTYwNAYDVQQKDC1CZWlqaW5nIFhpbmNoYWNoYSBDcmVkaXQgTWFuYWdlbWVudCBDby4sIEx0ZC4xKTAnBgNVBAMMIFhpbkNoYUNoYSBUcnVzdCBFViBUTFMgRzIgUjM0IENB", + 
"whitelist": false, + "attachment": { + "hash": "cf9549b1295f7f5940d1eecd85224fe71746a9062b3324375a1ba76e6d1076db", + "size": 2393, + "filename": "-NirAYepPYxIIKyEoNgiZNqhl8nplSzI0S3gILMyP48=.pem", + "location": "security-state-staging/intermediates/3304fe4e-f870-45ba-8df0-81b2a42b4266.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "+NirAYepPYxIIKyEoNgiZNqhl8nplSzI0S3gILMyP48=", + "crlite_enrolled": false, + "id": "6b1982a7-6538-4496-a5ea-475ad795dca4", + "last_modified": 1717192623098 + }, + { + "schema": 1717192454914, + "derHash": "tch6CyI52v4KUoXjQGJiaayl6Q9XSSw46QUMpdGLwho=", + "subject": "CN=XinChaCha Trust DV TLS G2 R34 CA,O=Beijing Xinchacha Credit Management Co.\\, Ltd.,C=CN", + "subjectDN": "MHAxCzAJBgNVBAYTAkNOMTYwNAYDVQQKDC1CZWlqaW5nIFhpbmNoYWNoYSBDcmVkaXQgTWFuYWdlbWVudCBDby4sIEx0ZC4xKTAnBgNVBAMMIFhpbkNoYUNoYSBUcnVzdCBEViBUTFMgRzIgUjM0IENB", + "whitelist": false, + "attachment": { + "hash": "46a17eb3357831af3e72dd3e752ae9d06b7ab5c7e2bde38c552efac33da3c3e7", + "size": 2393, + "filename": "ia3jA8mRz8AVBO57iULX9U3eGAXK9KzaSydFhjkVQp8=.pem", + "location": "security-state-staging/intermediates/d7185123-3b86-4d58-897d-92ce9c326347.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "ia3jA8mRz8AVBO57iULX9U3eGAXK9KzaSydFhjkVQp8=", + "crlite_enrolled": false, + "id": "0c74406b-6c55-4aa5-a137-e7f059e08b40", + "last_modified": 1717192623095 + }, + { + "schema": 1717192454523, + "derHash": "tdRtwCcTDlztO+UIPrNAKN2SMPTVo2rRkk0hwO+YTLo=", + "subject": "CN=XinChaCha Trust OV TLS G2 R34 CA,O=Beijing Xinchacha Credit Management Co.\\, Ltd.,C=CN", + "subjectDN": "MHAxCzAJBgNVBAYTAkNOMTYwNAYDVQQKDC1CZWlqaW5nIFhpbmNoYWNoYSBDcmVkaXQgTWFuYWdlbWVudCBDby4sIEx0ZC4xKTAnBgNVBAMMIFhpbkNoYUNoYSBUcnVzdCBPViBUTFMgRzIgUjM0IENB", + "whitelist": false, + "attachment": { + "hash": "f3b04dbb9ce45e317a43dd7e0bb1f2d617cf950b6c4f4dcd9041ba8c29903ca9", + "size": 2393, + "filename": "BTYzRPrLbl9bgC2AezB8r8c6OQbO2yNxAWm2wTdHcPM=.pem", + "location": "security-state-staging/intermediates/5b4a4976-5c36-4f3c-ad2c-c646050b9449.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "BTYzRPrLbl9bgC2AezB8r8c6OQbO2yNxAWm2wTdHcPM=", + "crlite_enrolled": false, + "id": "6ab4c9cc-76f0-46fa-a224-955f948b8672", + "last_modified": 1717192623093 + }, + { + "schema": 1717192453960, + "derHash": "UJq7uShkwsRNfLxGa2OVDjUBZe53KjA3roFo6SImpG8=", + "subject": "CN=Shoper DV TLS G2 R34 CA,O=Shoper S.A.,C=PL", + "subjectDN": "MEUxCzAJBgNVBAYTAlBMMRQwEgYDVQQKDAtTaG9wZXIgUy5BLjEgMB4GA1UEAwwXU2hvcGVyIERWIFRMUyBHMiBSMzQgQ0E=", + "whitelist": false, + "attachment": { + "hash": "89370aee0f5144d748f03671a40cd353d07ebc3e624152b697c93eac8414ae0c", + "size": 2337, + "filename": "-g6X50iw1KjEyqdAsOvTe7IZ3ChaEo_P0UMuuNWHp4s=.pem", + "location": "security-state-staging/intermediates/90e3e772-6b7e-49ca-ad56-1ad21d0e3c6b.pem", + "mimetype": "application/x-pem-file" + }, + "pubKeyHash": "+g6X50iw1KjEyqdAsOvTe7IZ3ChaEo/P0UMuuNWHp4s=", + "crlite_enrolled": false, + "id": "9de0322b-3852-44e0-a52c-20c5398fb224", + "last_modified": 1717192623090 + }, + { "schema": 1714791237384, "derHash": "5b/O2dIW66faNjSBn7U0+5zroez55jee2DWD0usXfBs=", "subject": "CN=CrowdStrike TLS RSA CA G5,O=CrowdStrike\\, Inc.,C=US", @@ -30907,5 +31015,5 @@ "last_modified": 1559865884636 } ], - "timestamp": 1715637423088 + "timestamp": 1717559823094 } diff --git a/services/settings/test/unit/head_settings.js b/services/settings/test/unit/head_settings.js new file mode 100644 index 0000000000..96573a9fcb --- /dev/null +++ 
b/services/settings/test/unit/head_settings.js @@ -0,0 +1,25 @@ +/* Any copyright is dedicated to the Public Domain. + http://creativecommons.org/publicdomain/zero/1.0/ */ + +/* import-globals-from ../../../common/tests/unit/head_global.js */ +/* import-globals-from ../../../common/tests/unit/head_helpers.js */ + +"use strict"; + +ChromeUtils.defineESModuleGetters(this, { + AppConstants: "resource://gre/modules/AppConstants.sys.mjs", + Database: "resource://services-settings/Database.sys.mjs", + Policy: "resource://services-common/uptake-telemetry.sys.mjs", + RemoteSettings: "resource://services-settings/remote-settings.sys.mjs", + RemoteSettingsClient: + "resource://services-settings/RemoteSettingsClient.sys.mjs", + RemoteSettingsWorker: + "resource://services-settings/RemoteSettingsWorker.sys.mjs", + setTimeout: "resource://gre/modules/Timer.sys.mjs", + SharedUtils: "resource://services-settings/SharedUtils.sys.mjs", + SyncHistory: "resource://services-settings/SyncHistory.sys.mjs", + TelemetryTestUtils: "resource://testing-common/TelemetryTestUtils.sys.mjs", + TestUtils: "resource://testing-common/TestUtils.sys.mjs", + UptakeTelemetry: "resource://services-common/uptake-telemetry.sys.mjs", + Utils: "resource://services-settings/Utils.sys.mjs", +}); diff --git a/services/settings/test/unit/test_attachments_downloader.js b/services/settings/test/unit/test_attachments_downloader.js index 284294cfde..2f7db36d8b 100644 --- a/services/settings/test/unit/test_attachments_downloader.js +++ b/services/settings/test/unit/test_attachments_downloader.js @@ -1,17 +1,6 @@ -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - -const { RemoteSettings } = ChromeUtils.importESModule( - "resource://services-settings/remote-settings.sys.mjs" -); -const { UptakeTelemetry } = ChromeUtils.importESModule( - "resource://services-common/uptake-telemetry.sys.mjs" -); const { Downloader } = ChromeUtils.importESModule( "resource://services-settings/Attachments.sys.mjs" ); -const { TelemetryTestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TelemetryTestUtils.sys.mjs" -); const RECORD = { id: "1f3a0802-648d-11ea-bd79-876a8b69c377", @@ -56,6 +45,11 @@ add_setup(() => { "/cdn/main-workspace/some-collection/", do_get_file("test_attachments_downloader") ); + + // For this test, we are using a server other than production. Force + // LOAD_DUMPS to true so that we can still load attachments from dumps. + delete Utils.LOAD_DUMPS; + Utils.LOAD_DUMPS = true; }); async function clear_state() { @@ -619,6 +613,80 @@ add_task(async function test_download_from_dump() { // but added for consistency with other tests tasks around here. add_task(clear_state); +add_task( + async function test_download_from_dump_fails_when_load_dumps_is_false() { + const client = RemoteSettings("dump-collection", { + bucketName: "dump-bucket", + }); + + // Temporarily replace the resource:-URL with another resource:-URL. 
+ const orig_RESOURCE_BASE_URL = Downloader._RESOURCE_BASE_URL; + Downloader._RESOURCE_BASE_URL = "resource://rs-downloader-test"; + const resProto = Services.io + .getProtocolHandler("resource") + .QueryInterface(Ci.nsIResProtocolHandler); + resProto.setSubstitution( + "rs-downloader-test", + Services.io.newFileURI(do_get_file("test_attachments_downloader")) + ); + + function checkInfo( + result, + expectedSource, + expectedRecord = RECORD_OF_DUMP + ) { + Assert.equal( + new TextDecoder().decode(new Uint8Array(result.buffer)), + "This would be a RS dump.\n", + "expected content from dump" + ); + Assert.deepEqual( + result.record, + expectedRecord, + "expected record for dump" + ); + Assert.equal(result._source, expectedSource, "expected source of dump"); + } + + // Download the dump so that we can use it to fill the cache. + const dump1 = await client.attachments.download(RECORD_OF_DUMP, { + // Note: attachmentId not set, so should fall back to record.id. + fallbackToDump: true, + }); + checkInfo(dump1, "dump_match"); + + // Fill the cache with the same data as the dump for the next part. + await client.db.saveAttachment(RECORD_OF_DUMP.id, { + record: RECORD_OF_DUMP, + blob: new Blob([dump1.buffer]), + }); + + // Now turn off loading dumps, and check we no longer load from the dump, + // but use the cache instead. + Utils.LOAD_DUMPS = false; + + const dump2 = await client.attachments.download(RECORD_OF_DUMP, { + // Note: attachmentId not set, so should fall back to record.id. + fallbackToDump: true, + }); + checkInfo(dump2, "cache_match"); + + // When the record is not given, the dump would take precedence over the + // cache but we have disabled dumps, so we should load from the cache. + const dump4 = await client.attachments.download(null, { + attachmentId: RECORD_OF_DUMP.id, + fallbackToCache: true, + fallbackToDump: true, + }); + checkInfo(dump4, "cache_fallback"); + + // Restore, just in case. 
+ Utils.LOAD_DUMPS = true; + Downloader._RESOURCE_BASE_URL = orig_RESOURCE_BASE_URL; + resProto.setSubstitution("rs-downloader-test", null); + } +); + add_task(async function test_attachment_get() { // Since get() is largely a wrapper around the same code as download(), // we only test a couple of parts to check it functions as expected, and diff --git a/services/settings/test/unit/test_remote_settings.js b/services/settings/test/unit/test_remote_settings.js index 382d1aa983..07007a755f 100644 --- a/services/settings/test/unit/test_remote_settings.js +++ b/services/settings/test/unit/test_remote_settings.js @@ -1,27 +1,8 @@ /* import-globals-from ../../../common/tests/unit/head_helpers.js */ -const { AppConstants } = ChromeUtils.importESModule( - "resource://gre/modules/AppConstants.sys.mjs" -); const { ObjectUtils } = ChromeUtils.importESModule( "resource://gre/modules/ObjectUtils.sys.mjs" ); -const { setTimeout } = ChromeUtils.importESModule( - "resource://gre/modules/Timer.sys.mjs" -); - -const { RemoteSettings } = ChromeUtils.importESModule( - "resource://services-settings/remote-settings.sys.mjs" -); -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); -const { UptakeTelemetry, Policy } = ChromeUtils.importESModule( - "resource://services-common/uptake-telemetry.sys.mjs" -); -const { TelemetryTestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TelemetryTestUtils.sys.mjs" -); const IS_ANDROID = AppConstants.platform == "android"; @@ -54,7 +35,7 @@ async function clear_state() { TelemetryTestUtils.assertEvents([], {}, { process: "dummy" }); } -function run_test() { +add_task(() => { // Set up an HTTP Server server = new HttpServer(); server.start(-1); @@ -93,13 +74,11 @@ function run_test() { ); server.registerPathHandler("/fake-x5u", handleResponse); - run_next_test(); - registerCleanupFunction(() => { Policy.getChannel = oldGetChannel; server.stop(() => {}); }); -} +}); add_task(clear_state); add_task(async function test_records_obtained_from_server_are_stored_in_db() { diff --git a/services/settings/test/unit/test_remote_settings_dump_lastmodified.js b/services/settings/test/unit/test_remote_settings_dump_lastmodified.js index 25de34c1be..6dd925281a 100644 --- a/services/settings/test/unit/test_remote_settings_dump_lastmodified.js +++ b/services/settings/test/unit/test_remote_settings_dump_lastmodified.js @@ -1,9 +1,5 @@ "use strict"; -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); - async function getLocalDumpLastModified(bucket, collection) { let res; try { diff --git a/services/settings/test/unit/test_remote_settings_jexl_filters.js b/services/settings/test/unit/test_remote_settings_jexl_filters.js index 56d35bdd2b..b7496332dc 100644 --- a/services/settings/test/unit/test_remote_settings_jexl_filters.js +++ b/services/settings/test/unit/test_remote_settings_jexl_filters.js @@ -1,7 +1,3 @@ -const { RemoteSettings } = ChromeUtils.importESModule( - "resource://services-settings/remote-settings.sys.mjs" -); - let client; async function createRecords(records) { @@ -18,11 +14,9 @@ async function createRecords(records) { ); } -function run_test() { +add_setup(() => { client = RemoteSettings("some-key"); - - run_next_test(); -} +}); add_task(async function test_returns_all_without_target() { await createRecords([ diff --git a/services/settings/test/unit/test_remote_settings_offline.js b/services/settings/test/unit/test_remote_settings_offline.js index 0a250c3e0a..cb194677e1 
100644 --- a/services/settings/test/unit/test_remote_settings_offline.js +++ b/services/settings/test/unit/test_remote_settings_offline.js @@ -1,13 +1,3 @@ -const { RemoteSettingsClient } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); -const { RemoteSettingsWorker } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsWorker.sys.mjs" -); -const { SharedUtils } = ChromeUtils.importESModule( - "resource://services-settings/SharedUtils.sys.mjs" -); - // A collection with a dump that's packaged on all builds where this test runs, // including on Android at mobile/android/installer/package-manifest.in const TEST_BUCKET = "main"; @@ -17,7 +7,7 @@ let client; let DUMP_RECORDS; let DUMP_LAST_MODIFIED; -add_task(async function setup() { +add_setup(async () => { // "services.settings.server" pref is not set. // Test defaults to an unreachable server, // and will only load from the dump if any. diff --git a/services/settings/test/unit/test_remote_settings_poll.js b/services/settings/test/unit/test_remote_settings_poll.js index c8025f4b7b..7146429427 100644 --- a/services/settings/test/unit/test_remote_settings_poll.js +++ b/services/settings/test/unit/test_remote_settings_poll.js @@ -1,34 +1,11 @@ -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - -const { AppConstants } = ChromeUtils.importESModule( - "resource://gre/modules/AppConstants.sys.mjs" -); -const { setTimeout } = ChromeUtils.importESModule( - "resource://gre/modules/Timer.sys.mjs" -); - -const { UptakeTelemetry, Policy } = ChromeUtils.importESModule( - "resource://services-common/uptake-telemetry.sys.mjs" -); -const { RemoteSettingsClient } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); const { pushBroadcastService } = ChromeUtils.importESModule( "resource://gre/modules/PushBroadcastService.sys.mjs" ); -const { SyncHistory } = ChromeUtils.importESModule( - "resource://services-settings/SyncHistory.sys.mjs" -); -const { RemoteSettings, remoteSettingsBroadcastHandler, BROADCAST_ID } = + +const { remoteSettingsBroadcastHandler, BROADCAST_ID } = ChromeUtils.importESModule( "resource://services-settings/remote-settings.sys.mjs" ); -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); -const { TelemetryTestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TelemetryTestUtils.sys.mjs" -); const IS_ANDROID = AppConstants.platform == "android"; @@ -80,7 +57,7 @@ function serveChangesEntries(serverTime, entriesOrFunc) { }; } -function run_test() { +add_setup(() => { // Set up an HTTP Server server = new HttpServer(); server.start(-1); @@ -89,13 +66,11 @@ function run_test() { let oldGetChannel = Policy.getChannel; Policy.getChannel = () => "nightly"; - run_next_test(); - registerCleanupFunction(() => { Policy.getChannel = oldGetChannel; server.stop(() => {}); }); -} +}); add_task(clear_state); diff --git a/services/settings/test/unit/test_remote_settings_recover_broken.js b/services/settings/test/unit/test_remote_settings_recover_broken.js index c5f82d6949..5bb047c8af 100644 --- a/services/settings/test/unit/test_remote_settings_recover_broken.js +++ b/services/settings/test/unit/test_remote_settings_recover_broken.js @@ -1,18 +1,3 @@ -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - -const { SyncHistory } = ChromeUtils.importESModule( - "resource://services-settings/SyncHistory.sys.mjs" -); -const { RemoteSettingsClient } = 
ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); -const { RemoteSettings } = ChromeUtils.importESModule( - "resource://services-settings/remote-settings.sys.mjs" -); -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); - const PREF_SETTINGS_SERVER = "services.settings.server"; const CHANGES_PATH = "/v1" + Utils.CHANGES_PATH; const BROKEN_SYNC_THRESHOLD = 10; // See default pref value diff --git a/services/settings/test/unit/test_remote_settings_release_prefs.js b/services/settings/test/unit/test_remote_settings_release_prefs.js index 251c407631..5d1a9f2e28 100644 --- a/services/settings/test/unit/test_remote_settings_release_prefs.js +++ b/services/settings/test/unit/test_remote_settings_release_prefs.js @@ -1,9 +1,5 @@ "use strict"; -const { AppConstants } = ChromeUtils.importESModule( - "resource://gre/modules/AppConstants.sys.mjs" -); - var nextUniqId = 0; function getNewUtils() { const { Utils } = ChromeUtils.importESModule( @@ -12,7 +8,21 @@ function getNewUtils() { return Utils; } -function clear_state() { +// A collection with a dump that's packaged on all builds where this test runs, +// including on Android at mobile/android/installer/package-manifest.in +const TEST_BUCKET = "main"; +const TEST_COLLECTION = "password-recipes"; + +async function importData(records) { + await RemoteSettingsWorker._execute("_test_only_import", [ + TEST_BUCKET, + TEST_COLLECTION, + records, + records[0]?.last_modified || 0, + ]); +} + +async function clear_state() { Services.env.set("MOZ_REMOTE_SETTINGS_DEVTOOLS", "0"); Services.prefs.clearUserPref("services.settings.server"); Services.prefs.clearUserPref("services.settings.preview_enabled"); @@ -186,6 +196,42 @@ add_task( const Utils = getNewUtils(); Assert.ok(!Utils.LOAD_DUMPS, "Dumps won't be loaded"); + + // The section below ensures that the LOAD_DUMPS flag properly takes effect. + // The client is set up here rather than add_setup to avoid triggering the + // lazy getters that are behind the global Utils.LOAD_DUMPS. If they are + // triggered too early, then they will potentially cache different values + // for the server urls and environment variables and this test then won't be + // testing what we expect it to. + + let client = new RemoteSettingsClient(TEST_COLLECTION); + + const dump = await SharedUtils.loadJSONDump(TEST_BUCKET, TEST_COLLECTION); + let DUMP_LAST_MODIFIED = dump.timestamp; + + // Dump is updated regularly, verify that the dump matches our expectations + // before running the test. 
+ Assert.greater( + DUMP_LAST_MODIFIED, + 1234, + "Assuming dump to be newer than dummy 1234" + ); + + await client.db.clear(); + await importData([{ last_modified: 1234, id: "dummy" }]); + + const after = await client.get(); + Assert.deepEqual( + after, + [{ last_modified: 1234, id: "dummy" }], + "Should have kept the original import" + ); + Assert.equal( + await client.getLastModified(), + 1234, + "Should have kept the import's timestamp" + ); + await client.db.clear(); } ); add_task(clear_state); diff --git a/services/settings/test/unit/test_remote_settings_signatures.js b/services/settings/test/unit/test_remote_settings_signatures.js index a730ba185e..6d3b01468e 100644 --- a/services/settings/test/unit/test_remote_settings_signatures.js +++ b/services/settings/test/unit/test_remote_settings_signatures.js @@ -1,19 +1,6 @@ /* import-globals-from ../../../common/tests/unit/head_helpers.js */ "use strict"; -const { RemoteSettings } = ChromeUtils.importESModule( - "resource://services-settings/remote-settings.sys.mjs" -); -const { RemoteSettingsClient } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); -const { UptakeTelemetry, Policy } = ChromeUtils.importESModule( - "resource://services-common/uptake-telemetry.sys.mjs" -); -const { TelemetryTestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TelemetryTestUtils.sys.mjs" -); - const PREF_SETTINGS_SERVER = "services.settings.server"; const SIGNER_NAME = "onecrl.content-signature.mozilla.org"; const TELEMETRY_COMPONENT = "remotesettings"; @@ -42,7 +29,7 @@ function getCertChain() { let server; let client; -function run_test() { +add_setup(() => { // Signature verification is enabled by default. We use a custom signer // because these tests were originally written for OneCRL. 
client = RemoteSettings("signed", { signerName: SIGNER_NAME }); @@ -57,13 +44,11 @@ function run_test() { let oldGetChannel = Policy.getChannel; Policy.getChannel = () => "nightly"; - run_next_test(); - registerCleanupFunction(() => { Policy.getChannel = oldGetChannel; server.stop(() => {}); }); -} +}); add_task(async function test_check_signatures() { // First, perform a signature verification with known data and signature diff --git a/services/settings/test/unit/test_remote_settings_sync_history.js b/services/settings/test/unit/test_remote_settings_sync_history.js index 1fdc3d1adc..cc1c5c2dab 100644 --- a/services/settings/test/unit/test_remote_settings_sync_history.js +++ b/services/settings/test/unit/test_remote_settings_sync_history.js @@ -1,9 +1,5 @@ "use strict"; -const { SyncHistory } = ChromeUtils.importESModule( - "resource://services-settings/SyncHistory.sys.mjs" -); - async function clear_state() { await new SyncHistory("").clear(); } diff --git a/services/settings/test/unit/test_remote_settings_utils.js b/services/settings/test/unit/test_remote_settings_utils.js index de372b3e44..e2b4ddf8e3 100644 --- a/services/settings/test/unit/test_remote_settings_utils.js +++ b/services/settings/test/unit/test_remote_settings_utils.js @@ -1,12 +1,3 @@ -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - -const { TestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TestUtils.sys.mjs" -); -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); - const BinaryOutputStream = Components.Constructor( "@mozilla.org/binaryoutputstream;1", "nsIBinaryOutputStream", diff --git a/services/settings/test/unit/test_remote_settings_utils_telemetry.js b/services/settings/test/unit/test_remote_settings_utils_telemetry.js index f12b0d7f9a..87b72c5f4d 100644 --- a/services/settings/test/unit/test_remote_settings_utils_telemetry.js +++ b/services/settings/test/unit/test_remote_settings_utils_telemetry.js @@ -3,17 +3,9 @@ "use strict"; -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - const { TelemetryController } = ChromeUtils.importESModule( "resource://gre/modules/TelemetryController.sys.mjs" ); -const { TelemetryTestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TelemetryTestUtils.sys.mjs" -); -const { Utils } = ChromeUtils.importESModule( - "resource://services-settings/Utils.sys.mjs" -); const server = new HttpServer(); server.start(-1); @@ -36,7 +28,7 @@ async function assertTelemetryEvents(expectedEvents) { }); } -add_task(async function setup() { +add_setup(async () => { await TelemetryController.testSetup(); }); diff --git a/services/settings/test/unit/test_remote_settings_worker.js b/services/settings/test/unit/test_remote_settings_worker.js index e2dcdb0063..083beb1a73 100644 --- a/services/settings/test/unit/test_remote_settings_worker.js +++ b/services/settings/test/unit/test_remote_settings_worker.js @@ -1,22 +1,3 @@ -/* import-globals-from ../../../common/tests/unit/head_helpers.js */ - -const { AppConstants } = ChromeUtils.importESModule( - "resource://gre/modules/AppConstants.sys.mjs" -); -const { TestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TestUtils.sys.mjs" -); - -const { RemoteSettingsWorker } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsWorker.sys.mjs" -); -const { RemoteSettingsClient } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); -const { Database } = 
ChromeUtils.importESModule( - "resource://services-settings/Database.sys.mjs" -); - const IS_ANDROID = AppConstants.platform == "android"; add_task(async function test_canonicaljson() { diff --git a/services/settings/test/unit/test_shutdown_handling.js b/services/settings/test/unit/test_shutdown_handling.js index 2c98f0ab9b..418b25a62d 100644 --- a/services/settings/test/unit/test_shutdown_handling.js +++ b/services/settings/test/unit/test_shutdown_handling.js @@ -3,20 +3,6 @@ http://creativecommons.org/publicdomain/zero/1.0/ */ "use strict"; -const { TestUtils } = ChromeUtils.importESModule( - "resource://testing-common/TestUtils.sys.mjs" -); - -const { Database } = ChromeUtils.importESModule( - "resource://services-settings/Database.sys.mjs" -); -const { RemoteSettingsWorker } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsWorker.sys.mjs" -); -const { RemoteSettingsClient } = ChromeUtils.importESModule( - "resource://services-settings/RemoteSettingsClient.sys.mjs" -); - add_task(async function test_shutdown_abort_after_start() { // Start a forever transaction: let counter = 0; diff --git a/services/settings/test/unit/xpcshell.toml b/services/settings/test/unit/xpcshell.toml index b305ea39f3..728767e604 100644 --- a/services/settings/test/unit/xpcshell.toml +++ b/services/settings/test/unit/xpcshell.toml @@ -1,5 +1,5 @@ [DEFAULT] -head = "../../../common/tests/unit/head_global.js ../../../common/tests/unit/head_helpers.js" +head = "../../../common/tests/unit/head_global.js ../../../common/tests/unit/head_helpers.js head_settings.js" firefox-appdir = "browser" tags = "remote-settings" support-files = ["test_remote_settings_signatures/**"] diff --git a/services/sync/modules/SyncedTabs.sys.mjs b/services/sync/modules/SyncedTabs.sys.mjs index 410244413e..058525995b 100644 --- a/services/sync/modules/SyncedTabs.sys.mjs +++ b/services/sync/modules/SyncedTabs.sys.mjs @@ -2,6 +2,8 @@ * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ +import { XPCOMUtils } from "resource://gre/modules/XPCOMUtils.sys.mjs"; + const lazy = {}; ChromeUtils.defineESModuleGetters(lazy, { @@ -40,6 +42,21 @@ ChromeUtils.defineLazyGetter(lazy, "log", () => { return log; }); +// We allow some test preferences to simulate many and inactive tabs. +XPCOMUtils.defineLazyPreferenceGetter( + lazy, + "NUM_FAKE_INACTIVE_TABS", + "services.sync.syncedTabs.numFakeInactiveTabs", + 0 +); + +XPCOMUtils.defineLazyPreferenceGetter( + lazy, + "NUM_FAKE_ACTIVE_TABS", + "services.sync.syncedTabs.numFakeActiveTabs", + 0 +); + // A private singleton that does the work. let SyncedTabsInternal = { /* Make a "tab" record. Returns a promise */ @@ -143,7 +160,27 @@ let SyncedTabsInternal = { let clientRepr = await this._makeClient(client); lazy.log.debug("Processing client", clientRepr); - for (let tab of client.tabs) { + let tabs = Array.from(client.tabs); // avoid modifying in-place. + // For QA, UX, etc, we allow "fake tabs" to be added to each device. 
+ for (let i = 0; i < lazy.NUM_FAKE_INACTIVE_TABS; i++) { + tabs.push({ + icon: null, + lastUsed: 1000, + title: `Fake inactive tab ${i}`, + urlHistory: [`https://example.com/inactive/${i}`], + inactive: true, + }); + } + for (let i = 0; i < lazy.NUM_FAKE_ACTIVE_TABS; i++) { + tabs.push({ + icon: null, + lastUsed: Date.now() - 1000 + i, + title: `Fake tab ${i}`, + urlHistory: [`https://example.com/${i}`], + }); + } + + for (let tab of tabs) { let url = tab.urlHistory[0]; lazy.log.trace("remote tab", url); diff --git a/services/sync/modules/constants.sys.mjs b/services/sync/modules/constants.sys.mjs index 9efe941c0a..ac7960f468 100644 --- a/services/sync/modules/constants.sys.mjs +++ b/services/sync/modules/constants.sys.mjs @@ -4,7 +4,7 @@ // Don't manually modify this line, as it is automatically replaced on merge day // by the gecko_migration.py script. -export const WEAVE_VERSION = "1.128.0"; +export const WEAVE_VERSION = "1.129.0"; // Sync Server API version that the client supports. export const SYNC_API_VERSION = "1.5"; diff --git a/services/sync/tests/unit/test_bookmark_tracker.js b/services/sync/tests/unit/test_bookmark_tracker.js index 6084e48ebd..c47b573e7e 100644 --- a/services/sync/tests/unit/test_bookmark_tracker.js +++ b/services/sync/tests/unit/test_bookmark_tracker.js @@ -764,26 +764,7 @@ add_task(async function test_onFaviconChanged() { let iconURL = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAA" + "AAAA6fptVAAAACklEQVQI12NgAAAAAgAB4iG8MwAAAABJRU5ErkJggg=="; - - PlacesUtils.favicons.replaceFaviconDataFromDataURL( - iconURI, - iconURL, - 0, - Services.scriptSecurityManager.getSystemPrincipal() - ); - - await new Promise(resolve => { - PlacesUtils.favicons.setAndFetchFaviconForPage( - pageURI, - iconURI, - true, - PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE, - () => { - resolve(); - }, - Services.scriptSecurityManager.getSystemPrincipal() - ); - }); + await PlacesTestUtils.setFaviconForPage(pageURI, iconURI, iconURL); await verifyTrackedItems([]); Assert.equal(tracker.score, 0); } finally { |
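The SyncedTabs hunk above reads two integer preferences (defaulting to 0) and pads every remote client with simulated inactive and active tabs for QA and UX work. Below is a minimal sketch of flipping those prefs from an async context such as an xpcshell test or the Browser Console; the pref names and the fake-tab shape come from the diff itself, while the use of getTabClients() and the specific values are illustrative assumptions, not part of this changeset.

// Sketch only: exercises the fake-tab prefs added to SyncedTabs.sys.mjs above.
// Pref names are taken from the diff; the surrounding calls are assumptions.
const { SyncedTabs } = ChromeUtils.importESModule(
  "resource://services-sync/SyncedTabs.sys.mjs"
);

// Ask SyncedTabs to pad each remote client with simulated tabs.
Services.prefs.setIntPref("services.sync.syncedTabs.numFakeInactiveTabs", 5);
Services.prefs.setIntPref("services.sync.syncedTabs.numFakeActiveTabs", 20);

// Assumed public API: getTabClients() returns the per-client tab lists; the
// fake entries use https://example.com/... URLs and "Fake tab N" titles, as
// generated in the loop shown in the diff.
const clients = await SyncedTabs.getTabClients();
console.log(clients.map(c => c.tabs.length));

// Clean up so normal runs are unaffected (both getters default to 0).
Services.prefs.clearUserPref("services.sync.syncedTabs.numFakeInactiveTabs");
Services.prefs.clearUserPref("services.sync.syncedTabs.numFakeActiveTabs");

Because both lazy preference getters default to 0, leaving the prefs cleared means the new code paths are no-ops in ordinary profiles.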