Diffstat (limited to 'toolkit/modules')
-rw-r--r--  toolkit/modules/AppConstants.sys.mjs                               |   7
-rw-r--r--  toolkit/modules/AsyncPrefs.sys.mjs                                 |   3
-rw-r--r--  toolkit/modules/GMPInstallManager.sys.mjs                          |  98
-rw-r--r--  toolkit/modules/IndexedDB.sys.mjs                                  |   8
-rw-r--r--  toolkit/modules/LightweightThemeConsumer.sys.mjs                   |   5
-rw-r--r--  toolkit/modules/LogManager.sys.mjs                                 | 475
-rw-r--r--  toolkit/modules/ProfileAge.sys.mjs                                 |  21
-rw-r--r--  toolkit/modules/RemotePageAccessManager.sys.mjs                    |   1
-rw-r--r--  toolkit/modules/SelectionUtils.sys.mjs                             |   2
-rw-r--r--  toolkit/modules/Sqlite.sys.mjs                                     |  41
-rw-r--r--  toolkit/modules/moz.build                                          |   2
-rw-r--r--  toolkit/modules/sessionstore/SessionStoreHelper.sys.mjs            | 110
-rw-r--r--  toolkit/modules/tests/browser/browser_BrowserUtils.js              |  33
-rw-r--r--  toolkit/modules/tests/browser/file_getSelectionDetails_inputs.html |   7
-rw-r--r--  toolkit/modules/tests/xpcshell/test_LogManager.js                  | 377
-rw-r--r--  toolkit/modules/tests/xpcshell/test_MatchURLFilters.js             |   4
-rw-r--r--  toolkit/modules/tests/xpcshell/test_ProfileAge.js                  |  31
-rw-r--r--  toolkit/modules/tests/xpcshell/xpcshell.toml                       |   2
18 files changed, 1162 insertions(+), 65 deletions(-)
diff --git a/toolkit/modules/AppConstants.sys.mjs b/toolkit/modules/AppConstants.sys.mjs
index bfc87fa622..f56813ee3e 100644
--- a/toolkit/modules/AppConstants.sys.mjs
+++ b/toolkit/modules/AppConstants.sys.mjs
@@ -474,6 +474,13 @@ export var AppConstants = Object.freeze({
     false,
 #endif
 
+  MOZ_SELECTABLE_PROFILES:
+#ifdef MOZ_SELECTABLE_PROFILES
+    true,
+#else
+    false,
+#endif
+
   // Returns true for CN region build when distibution id set as 'MozillaOnline'
   isChinaRepack() {
     return (
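The new constant mirrors a build-time define; a consumer would gate on it like any other AppConstants flag (illustrative sketch, not part of this commit):

    // Sketch: gate profile-selection code on the new build flag.
    const { AppConstants } = ChromeUtils.importESModule(
      "resource://gre/modules/AppConstants.sys.mjs"
    );

    if (AppConstants.MOZ_SELECTABLE_PROFILES) {
      // Only reached in builds where MOZ_SELECTABLE_PROFILES was defined.
    }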
diff --git a/toolkit/modules/AsyncPrefs.sys.mjs b/toolkit/modules/AsyncPrefs.sys.mjs
index 07f08c119e..00cf1e7fd5 100644
--- a/toolkit/modules/AsyncPrefs.sys.mjs
+++ b/toolkit/modules/AsyncPrefs.sys.mjs
@@ -31,6 +31,9 @@ const kAllowedPrefs = new Set([
   "reader.color_scheme",
   "reader.content_width",
   "reader.line_height",
+  "reader.text_alignment",
+  "reader.character_spacing",
+  "reader.word_spacing",
   "reader.custom_colors.foreground",
   "reader.custom_colors.background",
   "reader.custom_colors.unvisited-links",
diff --git a/toolkit/modules/GMPInstallManager.sys.mjs b/toolkit/modules/GMPInstallManager.sys.mjs
index c187215096..32355418a1 100644
--- a/toolkit/modules/GMPInstallManager.sys.mjs
+++ b/toolkit/modules/GMPInstallManager.sys.mjs
@@ -832,48 +832,62 @@ GMPDownloader.prototype = {
         gmpAddon.version,
       ]);
       let installPromise = gmpInstaller.install();
-      return installPromise.then(
-        extractedPaths => {
-          // Success, set the prefs
-          let now = Math.round(Date.now() / 1000);
-          GMPPrefs.setInt(GMPPrefs.KEY_PLUGIN_LAST_UPDATE, now, gmpAddon.id);
-          // Remember our ABI, so that if the profile is migrated to another
-          // platform or from 32 -> 64 bit, we notice and don't try to load the
-          // unexecutable plugin library.
-          let abi = GMPUtils._expectedABI(gmpAddon);
-          log.info("Setting ABI to '" + abi + "' for " + gmpAddon.id);
-          GMPPrefs.setString(GMPPrefs.KEY_PLUGIN_ABI, abi, gmpAddon.id);
-          // We use the combination of the hash and version to ensure we are
-          // up to date.
-          GMPPrefs.setString(
-            GMPPrefs.KEY_PLUGIN_HASHVALUE,
-            gmpAddon.hashValue,
-            gmpAddon.id
-          );
-          // Setting the version pref signals installation completion to consumers,
-          // if you need to set other prefs etc. do it before this.
-          GMPPrefs.setString(
-            GMPPrefs.KEY_PLUGIN_VERSION,
-            gmpAddon.version,
-            gmpAddon.id
-          );
-          return extractedPaths;
-        },
-        reason => {
-          GMPPrefs.setString(
-            GMPPrefs.KEY_PLUGIN_LAST_INSTALL_FAIL_REASON,
-            reason,
-            gmpAddon.id
-          );
-          let now = Math.round(Date.now() / 1000);
-          GMPPrefs.setInt(
-            GMPPrefs.KEY_PLUGIN_LAST_INSTALL_FAILED,
-            now,
-            gmpAddon.id
-          );
-          throw reason;
-        }
-      );
+      return installPromise
+        .then(
+          extractedPaths => {
+            // Success, set the prefs
+            let now = Math.round(Date.now() / 1000);
+            GMPPrefs.setInt(
+              GMPPrefs.KEY_PLUGIN_LAST_UPDATE,
+              now,
+              gmpAddon.id
+            );
+            // Remember our ABI, so that if the profile is migrated to another
+            // platform or from 32 -> 64 bit, we notice and don't try to load the
+            // unexecutable plugin library.
+            let abi = GMPUtils._expectedABI(gmpAddon);
+            log.info("Setting ABI to '" + abi + "' for " + gmpAddon.id);
+            GMPPrefs.setString(GMPPrefs.KEY_PLUGIN_ABI, abi, gmpAddon.id);
+            // We use the combination of the hash and version to ensure we are
+            // up to date.
+            GMPPrefs.setString(
+              GMPPrefs.KEY_PLUGIN_HASHVALUE,
+              gmpAddon.hashValue,
+              gmpAddon.id
+            );
+            // Setting the version pref signals installation completion to consumers,
+            // if you need to set other prefs etc. do it before this.
+            GMPPrefs.setString(
+              GMPPrefs.KEY_PLUGIN_VERSION,
+              gmpAddon.version,
+              gmpAddon.id
+            );
+            return extractedPaths;
+          },
+          reason => {
+            GMPPrefs.setString(
+              GMPPrefs.KEY_PLUGIN_LAST_INSTALL_FAIL_REASON,
+              reason,
+              gmpAddon.id
+            );
+            let now = Math.round(Date.now() / 1000);
+            GMPPrefs.setInt(
+              GMPPrefs.KEY_PLUGIN_LAST_INSTALL_FAILED,
+              now,
+              gmpAddon.id
+            );
+            throw reason;
+          }
+        )
+        .finally(() => {
+          log.info(`Deleting ${gmpAddon.id} temporary zip file ${zipPath}`);
+          // We need to send out an observer event to ensure the nsZipReaderCache
+          // clears its cache entries associated with our temporary file. Otherwise
+          // if the addons downloader reuses the temporary file path, then we may hit
+          // the cache and get different contents than expected.
+          Services.obs.notifyObservers(null, "flush-cache-entry", zipPath);
+          IOUtils.remove(zipPath);
+        });
     },
     reason => {
       GMPPrefs.setString(
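The interesting change here is the added .finally(...) stage: the temporary zip is now deleted whether the install resolves or rejects. A minimal sketch of that promise shape (placeholder helpers, not the real module):

    // Sketch: .finally() runs after either the success or the failure handler,
    // so cleanup cannot be skipped by an install error.
    function installWithCleanup(installPromise, cleanupTempFile) {
      return installPromise
        .then(
          paths => paths, // success: prefs recorded, extracted paths returned
          reason => {
            throw reason; // failure: failure prefs recorded, error re-thrown
          }
        )
        .finally(cleanupTempFile); // both paths end here
    }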
diff --git a/toolkit/modules/IndexedDB.sys.mjs b/toolkit/modules/IndexedDB.sys.mjs
index b3b9f81b8d..1ba7bb6835 100644
--- a/toolkit/modules/IndexedDB.sys.mjs
+++ b/toolkit/modules/IndexedDB.sys.mjs
@@ -281,9 +281,7 @@ export class IndexedDB {
    *
    * @param {string} dbName
    *        The name of the database to open.
-   * @param {object} options
-   *        The options with which to open the database.
-   * @param {integer} options.version
+   * @param {integer} version
    *        The schema version with which the database needs to be opened. If
    *        the database does not exist, or its current schema version does
    *        not match, the `onupgradeneeded` function will be called.
@@ -295,8 +293,8 @@ export class IndexedDB {
    *
    * @returns {Promise<IndexedDB>}
    */
-  static open(dbName, options, onupgradeneeded = null) {
-    let request = indexedDB.open(dbName, options);
+  static open(dbName, version, onupgradeneeded = null) {
+    let request = indexedDB.open(dbName, version);
     return this._wrapOpenRequest(request, onupgradeneeded);
   }
 
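After this change callers pass the schema version directly instead of wrapping it in an options object; a hedged usage sketch:

    // Sketch: open (or create) a database at schema version 1.
    const { IndexedDB } = ChromeUtils.importESModule(
      "resource://gre/modules/IndexedDB.sys.mjs"
    );

    async function openDemoDB() {
      // Previously: IndexedDB.open("demo-db", { version: 1 }, ...)
      return IndexedDB.open("demo-db", 1, db => {
        // Called when the database is created or has an older schema version.
        db.createObjectStore("items", { keyPath: "id" });
      });
    }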
diff --git a/toolkit/modules/LightweightThemeConsumer.sys.mjs b/toolkit/modules/LightweightThemeConsumer.sys.mjs
index c2ad888c22..ea66b05b6f 100644
--- a/toolkit/modules/LightweightThemeConsumer.sys.mjs
+++ b/toolkit/modules/LightweightThemeConsumer.sys.mjs
@@ -8,7 +8,6 @@ const lazy = {};
 // Get the theme variables from the app resource directory.
 // This allows per-app variables.
 ChromeUtils.defineESModuleGetters(lazy, {
-  NimbusFeatures: "resource://nimbus/ExperimentAPI.sys.mjs",
   PrivateBrowsingUtils: "resource://gre/modules/PrivateBrowsingUtils.sys.mjs",
   ThemeContentPropertyList: "resource:///modules/ThemeVariableMap.sys.mjs",
   ThemeVariableMap: "resource:///modules/ThemeVariableMap.sys.mjs",
@@ -274,9 +273,7 @@ LightweightThemeConsumer.prototype = {
 
     // If enabled, apply the dark theme variant to private browsing windows.
     if (
-      !lazy.NimbusFeatures.majorRelease2022.getVariable(
-        "feltPrivacyPBMDarkTheme"
-      ) ||
+      !Services.prefs.getBoolPref("browser.theme.dark-private-windows") ||
       !lazy.PrivateBrowsingUtils.isWindowPrivate(this._win) ||
       lazy.PrivateBrowsingUtils.permanentPrivateBrowsing
     ) {
diff --git a/toolkit/modules/LogManager.sys.mjs b/toolkit/modules/LogManager.sys.mjs
new file mode 100644
index 0000000000..ee8f3fcee8
--- /dev/null
+++ b/toolkit/modules/LogManager.sys.mjs
@@ -0,0 +1,475 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+/**
+ * This module provides a file-based, persistent logging facility for scenarios where
+ * retaining those logs over time and across browser restarts is important.
+ * Unless you need this feature specifically, please use console.createInstance.
+ */
+
+// See Bug 1889052
+// eslint-disable-next-line mozilla/use-console-createInstance
+import { Log } from "resource://gre/modules/Log.sys.mjs";
+
+const lazy = {};
+
+ChromeUtils.defineESModuleGetters(lazy, {
+  FileUtils: "resource://gre/modules/FileUtils.sys.mjs",
+  NetUtil: "resource://gre/modules/NetUtil.sys.mjs",
+});
+
+const DEFAULT_MAX_ERROR_AGE = 20 * 24 * 60 * 60; // 20 days
+
+// "shared" logs (ie, where the same log name is used by multiple LogManager
+// instances) are a fact of life here - eg, FirefoxAccounts logs are used by
+// both Sync and Reading List.
+// However, different instances have different pref branches, so we need to
+// handle when one pref branch says "Debug" and the other says "Error"
+// So we (a) keep singleton console and dump appenders and (b) keep track
+// of the minimum (ie, most verbose) level and use that.
+// This avoids (a) the most recent setter winning (as that is indeterminate)
+// and (b) multiple dump/console appenders being added to the same log multiple
+// times, which would cause messages to appear twice.
+
+// Singletons used by each instance.
+var formatter;
+var dumpAppender;
+var consoleAppender;
+
+// A set of all preference roots used by all instances.
+var allBranches = new Set();
+
+const STREAM_SEGMENT_SIZE = 4096;
+const PR_UINT32_MAX = 0xffffffff;
+
+/**
+ * Append to an nsIStorageStream
+ *
+ * This writes logging output to an in-memory stream which can later be read
+ * back as an nsIInputStream. It can be used to avoid expensive I/O operations
+ * during logging. Instead, one can periodically consume the input stream and
+ * e.g. write it to disk asynchronously.
+ */
+class StorageStreamAppender extends Log.Appender {
+  constructor(formatter) {
+    super(formatter);
+    this._name = "StorageStreamAppender";
+
+    this._converterStream = null; // holds the nsIConverterOutputStream
+    this._outputStream = null; // holds the underlying nsIOutputStream
+
+    this._ss = null;
+  }
+
+  get outputStream() {
+    if (!this._outputStream) {
+      // First create a raw stream. We can bail out early if that fails.
+      this._outputStream = this.newOutputStream();
+      if (!this._outputStream) {
+        return null;
+      }
+
+      // Wrap the raw stream in an nsIConverterOutputStream. We can reuse
+      // the instance if we already have one.
+      if (!this._converterStream) {
+        this._converterStream = Cc[
+          "@mozilla.org/intl/converter-output-stream;1"
+        ].createInstance(Ci.nsIConverterOutputStream);
+      }
+      this._converterStream.init(this._outputStream, "UTF-8");
+    }
+    return this._converterStream;
+  }
+
+  newOutputStream() {
+    let ss = (this._ss = Cc["@mozilla.org/storagestream;1"].createInstance(
+      Ci.nsIStorageStream
+    ));
+    ss.init(STREAM_SEGMENT_SIZE, PR_UINT32_MAX, null);
+    return ss.getOutputStream(0);
+  }
+
+  getInputStream() {
+    if (!this._ss) {
+      return null;
+    }
+    return this._ss.newInputStream(0);
+  }
+
+  reset() {
+    if (!this._outputStream) {
+      return;
+    }
+    this.outputStream.close();
+    this._outputStream = null;
+    this._ss = null;
+  }
+
+  doAppend(formatted) {
+    if (!formatted) {
+      return;
+    }
+    try {
+      this.outputStream.writeString(formatted + "\n");
+    } catch (ex) {
+      if (ex.result == Cr.NS_BASE_STREAM_CLOSED) {
+        // The underlying output stream is closed, so let's open a new one
+        // and try again.
+        this._outputStream = null;
+      }
+      try {
+        this.outputStream.writeString(formatted + "\n");
+      } catch (ex) {
+        // Ah well, we tried, but something seems to be hosed permanently.
+      }
+    }
+  }
+}
+
+/**
+ * A storage appender that is flushable to a file on disk.
+ *
+ * Policies for when to flush, to what file, log rotation etc are up to the consumer
+ * (although it does maintain a .sawError property to help the consumer decide
+ * based on its policies)
+ */
+class FlushableStorageAppender extends StorageStreamAppender {
+  constructor(formatter) {
+    super(formatter);
+    this.sawError = false;
+  }
+
+  append(message) {
+    if (message.level >= Log.Level.Error) {
+      this.sawError = true;
+    }
+    StorageStreamAppender.prototype.append.call(this, message);
+  }
+
+  reset() {
+    super.reset();
+    this.sawError = false;
+  }
+
+  /**
+   * Flush the current stream to a file.
+   *
+   * Somewhat counter-intuitively, you must pass a log which will be written to
+   * with details of the operation.
+   */
+  async flushToFile(subdirArray, filename, log) {
+    let inStream = this.getInputStream();
+    this.reset();
+    if (!inStream) {
+      log.debug("Failed to flush log to a file - no input stream");
+      return;
+    }
+    log.debug("Flushing file log");
+    log.trace("Beginning stream copy to " + filename + ": " + Date.now());
+    try {
+      await this._copyStreamToFile(inStream, subdirArray, filename, log);
+      log.trace("onCopyComplete", Date.now());
+    } catch (ex) {
+      log.error("Failed to copy log stream to file", ex);
+    }
+  }
+
+  /**
+   * Copy an input stream to the named file, doing everything off the main
+   * thread.
+   * subDirArray is an array of path components, relative to the profile
+   * directory, where the file will be created.
+   * outputFileName is the filename to create.
+   * Returns a promise that is resolved on completion or rejected with an error.
+   */
+  async _copyStreamToFile(inputStream, subdirArray, outputFileName, log) {
+    let outputDirectory = PathUtils.join(PathUtils.profileDir, ...subdirArray);
+    await IOUtils.makeDirectory(outputDirectory);
+    let fullOutputFileName = PathUtils.join(outputDirectory, outputFileName);
+
+    let outputStream = Cc[
+      "@mozilla.org/network/file-output-stream;1"
+    ].createInstance(Ci.nsIFileOutputStream);
+
+    outputStream.init(
+      new lazy.FileUtils.File(fullOutputFileName),
+      -1,
+      -1,
+      Ci.nsIFileOutputStream.DEFER_OPEN
+    );
+
+    await new Promise(resolve =>
+      lazy.NetUtil.asyncCopy(inputStream, outputStream, () => resolve())
+    );
+
+    outputStream.close();
+    log.trace("finished copy to", fullOutputFileName);
+  }
+}
+
+/**
+ * Each LogManager monitors preferences, resolves log levels and verbosity,
+ * and manages the creation, rotation and clean up of log files in a profile subdirectory.
+ */
+export class LogManager {
+  constructor(options = {}) {
+    this._prefObservers = [];
+    this.#init(options);
+  }
+
+  static StorageStreamAppender = StorageStreamAppender;
+
+  _cleaningUpFileLogs = false;
+
+  #init({
+    prefRoot,
+    logNames,
+    logFilePrefix,
+    logFileSubDirectoryEntries,
+    testTopicPrefix,
+  } = {}) {
+    this._prefs = Services.prefs.getBranch(prefRoot);
+    this._prefsBranch = prefRoot;
+
+    this.logFilePrefix = logFilePrefix;
+    this._testTopicPrefix = testTopicPrefix;
+
+    // At this point we don't allow a custom directory for the logs, nor allow
+    // it to be outside the profile directory.
+    // This returns an array of the relative directory entries below the
+    // profile dir, and is the directory about:sync-log uses.
+    this.logFileSubDirectoryEntries = Object.freeze(logFileSubDirectoryEntries);
+
+    if (!formatter) {
+      // Create a formatter and various appenders to attach to the logs.
+      formatter = new Log.BasicFormatter();
+      consoleAppender = new Log.ConsoleAppender(formatter);
+      dumpAppender = new Log.DumpAppender(formatter);
+    }
+
+    allBranches.add(this._prefsBranch);
+    // We create a preference observer for all our prefs so they are magically
+    // reflected if the pref changes after creation.
+    let setupAppender = (
+      appender,
+      prefName,
+      defaultLevel,
+      findSmallest = false
+    ) => {
+      let observer = newVal => {
+        let level = Log.Level[newVal] || defaultLevel;
+        if (findSmallest) {
+          // As some of our appenders have global impact (ie, there is only one
+          // place 'dump' goes to), we need to find the smallest value from all
+          // prefs controlling this appender.
+          // For example, if consumerA has dump=Debug then consumerB sets
+          // dump=Error, we need to keep dump=Debug so consumerA is respected.
+          for (let branch of allBranches) {
+            let lookPrefBranch = Services.prefs.getBranch(branch);
+            let lookVal =
+              Log.Level[lookPrefBranch.getStringPref(prefName, null)];
+            if (lookVal && lookVal < level) {
+              level = lookVal;
+            }
+          }
+        }
+        appender.level = level;
+      };
+      this._prefs.addObserver(prefName, observer);
+      this._prefObservers.push([prefName, observer]);
+      // and call the observer now with the current pref value.
+      observer(this._prefs.getStringPref(prefName, null));
+      return observer;
+    };
+
+    this._observeConsolePref = setupAppender(
+      consoleAppender,
+      "log.appender.console",
+      Log.Level.Fatal,
+      true
+    );
+    this._observeDumpPref = setupAppender(
+      dumpAppender,
+      "log.appender.dump",
+      Log.Level.Error,
+      true
+    );
+
+    // The file appender doesn't get the special singleton behaviour.
+    let fapp = (this._fileAppender = new FlushableStorageAppender(formatter));
+    // the stream gets a default of Debug as the user must go out of their way
+    // to see the stuff spewed to it.
+    this._observeStreamPref = setupAppender(
+      fapp,
+      "log.appender.file.level",
+      Log.Level.Debug
+    );
+
+    // now attach the appenders to all our logs.
+    for (let logName of logNames) {
+      let log = Log.repository.getLogger(logName);
+      for (let appender of [fapp, dumpAppender, consoleAppender]) {
+        log.addAppender(appender);
+      }
+    }
+    // and use the first specified log as a "root" for our log.
+    this._log = Log.repository.getLogger(logNames[0] + ".LogManager");
+  }
+
+ */ + async resetFileLog() { + try { + let flushToFile; + let reasonPrefix; + let reason; + if (this._fileAppender.sawError) { + reason = this.ERROR_LOG_WRITTEN; + flushToFile = this._prefs.getBoolPref( + "log.appender.file.logOnError", + true + ); + reasonPrefix = "error"; + } else { + reason = this.SUCCESS_LOG_WRITTEN; + flushToFile = this._prefs.getBoolPref( + "log.appender.file.logOnSuccess", + false + ); + reasonPrefix = "success"; + } + + // might as well avoid creating an input stream if we aren't going to use it. + if (!flushToFile) { + this._fileAppender.reset(); + return null; + } + + // We have reasonPrefix at the start of the filename so all "error" + // logs are grouped in about:sync-log. + let filename = + reasonPrefix + "-" + this.logFilePrefix + "-" + Date.now() + ".txt"; + await this._fileAppender.flushToFile( + this.logFileSubDirectoryEntries, + filename, + this._log + ); + // It's not completely clear to markh why we only do log cleanups + // for errors, but for now the Sync semantics have been copied... + // (one theory is that only cleaning up on error makes it less + // likely old error logs would be removed, but that's not true if + // there are occasional errors - let's address this later!) + if (reason == this.ERROR_LOG_WRITTEN && !this._cleaningUpFileLogs) { + this._log.trace("Running cleanup."); + try { + await this.cleanupLogs(); + } catch (err) { + this._log.error("Failed to cleanup logs", err); + } + } + return reason; + } catch (ex) { + this._log.error("Failed to resetFileLog", ex); + return null; + } + } + + /** + * Finds all logs older than maxErrorAge and deletes them using async I/O. + */ + cleanupLogs() { + let maxAge = this._prefs.getIntPref( + "log.appender.file.maxErrorAge", + DEFAULT_MAX_ERROR_AGE + ); + let threshold = Date.now() - 1000 * maxAge; + this._log.debug("Log cleanup threshold time: " + threshold); + + let shouldDelete = fileInfo => { + return fileInfo.lastModified < threshold; + }; + return this._deleteLogFiles(shouldDelete); + } + + /** + * Finds all logs and removes them. + */ + removeAllLogs() { + return this._deleteLogFiles(() => true); + } + + /** + * Delete some log files. A callback is invoked for each found log file to + * determine if that file should be removed. + */ + async _deleteLogFiles(cbShouldDelete) { + this._cleaningUpFileLogs = true; + let logDir = lazy.FileUtils.getDir( + "ProfD", + this.logFileSubDirectoryEntries + ); + for (const path of await IOUtils.getChildren(logDir.path)) { + const name = PathUtils.filename(path); + + if (!name.startsWith("error-") && !name.startsWith("success-")) { + continue; + } + + try { + const info = await IOUtils.stat(path); + if (!cbShouldDelete(info)) { + continue; + } + + this._log.trace(` > Cleanup removing ${name} (${info.lastModified})`); + await IOUtils.remove(path); + this._log.trace(`Deleted ${name}`); + } catch (ex) { + this._log.debug( + `Encountered error trying to clean up old log file ${name}`, + ex + ); + } + } + this._cleaningUpFileLogs = false; + this._log.debug("Done deleting files."); + // This notification is used only for tests. 
diff --git a/toolkit/modules/ProfileAge.sys.mjs b/toolkit/modules/ProfileAge.sys.mjs
index ea824f5a91..94e991749f 100644
--- a/toolkit/modules/ProfileAge.sys.mjs
+++ b/toolkit/modules/ProfileAge.sys.mjs
@@ -161,6 +161,27 @@ class ProfileAgeImpl {
     }
     return Promise.resolve(undefined);
   }
+
+  /**
+   * Record (and persist) when a backup recovery happened. We just store a
+   * single value - the timestamp at the time of recovery.
+   *
+   * Returns a promise that is resolved once the file has been written.
+   */
+  recordRecoveredFromBackup(time = Date.now()) {
+    this._times.recoveredFromBackup = time;
+    return this.writeTimes();
+  }
+
+  /* Returns a promise that resolves to the time the profile was recovered from
+   * a backup or undefined if not recorded.
+   */
+  get recoveredFromBackup() {
+    if ("recoveredFromBackup" in this._times) {
+      return Promise.resolve(this._times.recoveredFromBackup);
+    }
+    return Promise.resolve(undefined);
+  }
 }
 
 // A Map from profile directory to a promise that resolves to the ProfileAgeImpl.
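Hedged sketch of the new pair in use (calling ProfileAge() with no argument operates on the current profile):

    // Sketch: persist and read back the backup-recovery timestamp.
    const { ProfileAge } = ChromeUtils.importESModule(
      "resource://gre/modules/ProfileAge.sys.mjs"
    );

    async function markRecoveredFromBackup() {
      let times = await ProfileAge(); // current profile's times.json
      await times.recordRecoveredFromBackup(); // defaults to Date.now()
      return times.recoveredFromBackup; // resolves to the timestamp, or undefined
    }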
diff --git a/toolkit/modules/RemotePageAccessManager.sys.mjs b/toolkit/modules/RemotePageAccessManager.sys.mjs
index 61c00880cb..838f6e9157 100644
--- a/toolkit/modules/RemotePageAccessManager.sys.mjs
+++ b/toolkit/modules/RemotePageAccessManager.sys.mjs
@@ -103,6 +103,7 @@ export let RemotePageAccessManager = {
       "security.certerror.hideAddException",
       "security.xfocsp.errorReporting.automatic",
       "security.xfocsp.errorReporting.enabled",
+      "security.xfocsp.hideOpenInNewWindow",
       "network.trr.display_fallback_warning",
     ],
    RPMSetPref: [
diff --git a/toolkit/modules/SelectionUtils.sys.mjs b/toolkit/modules/SelectionUtils.sys.mjs
index 8dcbc0c494..5eed1714b5 100644
--- a/toolkit/modules/SelectionUtils.sys.mjs
+++ b/toolkit/modules/SelectionUtils.sys.mjs
@@ -75,7 +75,7 @@ export var SelectionUtils = {
       }
     }
 
-    let collapsed = selection.isCollapsed;
+    let collapsed = selection.areNormalAndCrossShadowBoundaryRangesCollapsed;
 
     if (selectionStr) {
       // Have some text, let's figure out if it looks like a URL that isn't
diff --git a/toolkit/modules/Sqlite.sys.mjs b/toolkit/modules/Sqlite.sys.mjs
index b1f48c28be..ca58904d6b 100644
--- a/toolkit/modules/Sqlite.sys.mjs
+++ b/toolkit/modules/Sqlite.sys.mjs
@@ -1184,9 +1184,15 @@ ConnectionData.prototype = Object.freeze({
    * @param {string} destFilePath
    *   The path on the local filesystem to write the database copy. Any existing
    *   file at this path will be overwritten.
+   * @param {number} [pagesPerStep=0]
+   *   The number of pages to copy per step. If not supplied or is 0, falls back
+   *   to the platform default which is currently 5.
+   * @param {number} [stepDelayMs=0]
+   *   The number of milliseconds to wait between copying steps. If not supplied
+   *   or is 0, falls back to the platform default which is currently 250.
    * @return Promise<undefined, nsresult>
    */
-  async backupToFile(destFilePath) {
+  async backupToFile(destFilePath, pagesPerStep = 0, stepDelayMs = 0) {
     if (!this._dbConn) {
       return Promise.reject(
         new Error("No opened database connection to create a backup from.")
@@ -1194,13 +1200,18 @@ ConnectionData.prototype = Object.freeze({
     }
     let destFile = await IOUtils.getFile(destFilePath);
     return new Promise((resolve, reject) => {
-      this._dbConn.backupToFileAsync(destFile, result => {
-        if (Components.isSuccessCode(result)) {
-          resolve();
-        } else {
-          reject(result);
-        }
-      });
+      this._dbConn.backupToFileAsync(
+        destFile,
+        result => {
+          if (Components.isSuccessCode(result)) {
+            resolve();
+          } else {
+            reject(result);
+          }
+        },
+        pagesPerStep,
+        stepDelayMs
+      );
     });
   },
 });
@@ -2002,10 +2013,20 @@ OpenedConnection.prototype = Object.freeze({
    * @param {string} destFilePath
    *   The path on the local filesystem to write the database copy. Any existing
    *   file at this path will be overwritten.
+   * @param {number} [pagesPerStep=0]
+   *   The number of pages to copy per step. If not supplied or is 0, falls back
+   *   to the platform default which is currently 5.
+   * @param {number} [stepDelayMs=0]
+   *   The number of milliseconds to wait between copying steps. If not supplied
+   *   or is 0, falls back to the platform default which is currently 250.
    * @return Promise<undefined, nsresult>
    */
-  backup(destFilePath) {
-    return this._connectionData.backupToFile(destFilePath);
+  backup(destFilePath, pagesPerStep = 0, stepDelayMs = 0) {
+    return this._connectionData.backupToFile(
+      destFilePath,
+      pagesPerStep,
+      stepDelayMs
+    );
   },
 });
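The two new knobs let callers trade backup speed against contention on a busy connection; a hedged sketch (the database path is illustrative):

    // Sketch: throttled online backup - copy 50 pages at a time and pause
    // 100ms between steps so queued statements can run in between.
    const { Sqlite } = ChromeUtils.importESModule(
      "resource://gre/modules/Sqlite.sys.mjs"
    );

    async function throttledBackup(destPath) {
      let conn = await Sqlite.openConnection({ path: "demo.sqlite" });
      try {
        await conn.backup(destPath, 50, 100);
      } finally {
        await conn.close();
      }
    }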
+ */ + buildRestoreData(formdata, scroll) { + function addFormEntries(root, fields, isXpath) { + for (let [key, value] of Object.entries(fields)) { + switch (typeof value) { + case "string": + root.addTextField(isXpath, key, value); + break; + case "boolean": + root.addCheckbox(isXpath, key, value); + break; + case "object": { + if (value === null) { + break; + } + if ( + value.hasOwnProperty("type") && + value.hasOwnProperty("fileList") + ) { + root.addFileList(isXpath, key, value.type, value.fileList); + break; + } + if ( + value.hasOwnProperty("selectedIndex") && + value.hasOwnProperty("value") + ) { + root.addSingleSelect( + isXpath, + key, + value.selectedIndex, + value.value + ); + break; + } + if ( + value.hasOwnProperty("value") && + value.hasOwnProperty("state") + ) { + root.addCustomElement(isXpath, key, value.value, value.state); + break; + } + if ( + key === "sessionData" && + ["about:sessionrestore", "about:welcomeback"].includes( + formdata.url + ) + ) { + root.addTextField(isXpath, key, JSON.stringify(value)); + break; + } + if (Array.isArray(value)) { + root.addMultipleSelect(isXpath, key, value); + break; + } + } + } + } + } + + let root = SessionStoreUtils.constructSessionStoreRestoreData(); + if (scroll?.hasOwnProperty("scroll")) { + root.scroll = scroll.scroll; + } + if (formdata?.hasOwnProperty("url")) { + root.url = formdata.url; + if (formdata.hasOwnProperty("innerHTML")) { + // eslint-disable-next-line no-unsanitized/property + root.innerHTML = formdata.innerHTML; + } + if (formdata.hasOwnProperty("xpath")) { + addFormEntries(root, formdata.xpath, /* isXpath */ true); + } + if (formdata.hasOwnProperty("id")) { + addFormEntries(root, formdata.id, /* isXpath */ false); + } + } + let childrenLength = Math.max( + scroll?.children?.length || 0, + formdata?.children?.length || 0 + ); + for (let i = 0; i < childrenLength; i++) { + root.addChild( + this.buildRestoreData(formdata?.children?.[i], scroll?.children?.[i]), + i + ); + } + return root; + }, +}; diff --git a/toolkit/modules/tests/browser/browser_BrowserUtils.js b/toolkit/modules/tests/browser/browser_BrowserUtils.js index da28c07b69..de85566d98 100644 --- a/toolkit/modules/tests/browser/browser_BrowserUtils.js +++ b/toolkit/modules/tests/browser/browser_BrowserUtils.js @@ -21,7 +21,7 @@ add_task(async function test_getSelectionDetails_input() { content.getSelection().removeAllRanges(); let info = SelectionUtils.getSelectionDetails(content); Assert.equal(text, info.text); - Assert.ok(!info.collapsed); + Assert.strictEqual(info.docSelectionIsCollapsed, false); Assert.equal(linkURL, info.linkURL); } @@ -48,3 +48,34 @@ add_task(async function test_getSelectionDetails_input() { }); }); }); + +add_task(async function test_getSelectionDetails_shadow_selection() { + const url = kFixtureBaseURL + "file_getSelectionDetails_inputs.html"; + await SpecialPowers.pushPrefEnv({ + set: [["dom.shadowdom.selection_across_boundary.enabled", true]], + }); + await BrowserTestUtils.withNewTab({ gBrowser, url }, async browser => { + await SpecialPowers.spawn(browser, [], async () => { + function checkSelection() { + const { SelectionUtils } = ChromeUtils.importESModule( + "resource://gre/modules/SelectionUtils.sys.mjs" + ); + + const text = content.document.getElementById("outer"); + const host = content.document.getElementById("host"); + content + .getSelection() + .setBaseAndExtent( + text, + 0, + host.shadowRoot.getElementById("inner").firstChild, + 3 + ); + let info = SelectionUtils.getSelectionDetails(content); + // TODO(sefeng): 
diff --git a/toolkit/modules/tests/browser/browser_BrowserUtils.js b/toolkit/modules/tests/browser/browser_BrowserUtils.js
index da28c07b69..de85566d98 100644
--- a/toolkit/modules/tests/browser/browser_BrowserUtils.js
+++ b/toolkit/modules/tests/browser/browser_BrowserUtils.js
@@ -21,7 +21,7 @@ add_task(async function test_getSelectionDetails_input() {
       content.getSelection().removeAllRanges();
       let info = SelectionUtils.getSelectionDetails(content);
       Assert.equal(text, info.text);
-      Assert.ok(!info.collapsed);
+      Assert.strictEqual(info.docSelectionIsCollapsed, false);
       Assert.equal(linkURL, info.linkURL);
     }
 
@@ -48,3 +48,34 @@ add_task(async function test_getSelectionDetails_input() {
     });
   });
 });
+
+add_task(async function test_getSelectionDetails_shadow_selection() {
+  const url = kFixtureBaseURL + "file_getSelectionDetails_inputs.html";
+  await SpecialPowers.pushPrefEnv({
+    set: [["dom.shadowdom.selection_across_boundary.enabled", true]],
+  });
+  await BrowserTestUtils.withNewTab({ gBrowser, url }, async browser => {
+    await SpecialPowers.spawn(browser, [], async () => {
+      function checkSelection() {
+        const { SelectionUtils } = ChromeUtils.importESModule(
+          "resource://gre/modules/SelectionUtils.sys.mjs"
+        );
+
+        const text = content.document.getElementById("outer");
+        const host = content.document.getElementById("host");
+        content
+          .getSelection()
+          .setBaseAndExtent(
+            text,
+            0,
+            host.shadowRoot.getElementById("inner").firstChild,
+            3
+          );
+        let info = SelectionUtils.getSelectionDetails(content);
+        // TODO(sefeng): verify info.text after bug 1881095 is fixed
+        Assert.strictEqual(info.docSelectionIsCollapsed, false);
+      }
+      checkSelection();
+    });
+  });
+});
diff --git a/toolkit/modules/tests/browser/file_getSelectionDetails_inputs.html b/toolkit/modules/tests/browser/file_getSelectionDetails_inputs.html
index 2e49146785..a8946ded63 100644
--- a/toolkit/modules/tests/browser/file_getSelectionDetails_inputs.html
+++ b/toolkit/modules/tests/browser/file_getSelectionDetails_inputs.html
@@ -5,5 +5,12 @@
   <input id="url-with-scheme" value="https://test.example.com">
   <input id="not-url" value="foo. bar">
   <input id="not-url-number" value="3.5">
+
+  <span id="outer">OuterText</span>
+  <div id="host">
+    <template shadowrootmode="open">
+      <span id="inner">innerText</span>
+    </template>
+  </div>
 </body>
 </html>
diff --git a/toolkit/modules/tests/xpcshell/test_LogManager.js b/toolkit/modules/tests/xpcshell/test_LogManager.js
new file mode 100644
index 0000000000..fa5e5abc2e
--- /dev/null
+++ b/toolkit/modules/tests/xpcshell/test_LogManager.js
@@ -0,0 +1,377 @@
+/* Any copyright is dedicated to the Public Domain.
+   http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// NOTE: The sync test_errorhandler_* tests have quite good coverage for
+// other aspects of this.
+
+const { LogManager } = ChromeUtils.importESModule(
+  "resource://gre/modules/LogManager.sys.mjs"
+);
+const { Log } = ChromeUtils.importESModule(
+  "resource://gre/modules/Log.sys.mjs"
+);
+const { FileUtils } = ChromeUtils.importESModule(
+  "resource://gre/modules/FileUtils.sys.mjs"
+);
+const logManagerDefaultOptions = {
+  logFileSubDirectoryEntries: ["weave", "logs"],
+  testTopicPrefix: "services-tests:common:log-manager:",
+};
+
+// Returns an array of [consoleAppender, dumpAppender, [fileAppenders]] for
+// the specified log. Note that fileAppenders will usually have length=1
+function getAppenders(log) {
+  let capps = log.appenders.filter(app => app instanceof Log.ConsoleAppender);
+  equal(capps.length, 1, "should only have one console appender");
+  let dapps = log.appenders.filter(app => app instanceof Log.DumpAppender);
+  equal(dapps.length, 1, "should only have one dump appender");
+  let fapps = log.appenders.filter(
+    app => app instanceof LogManager.StorageStreamAppender
+  );
+  return [capps[0], dapps[0], fapps];
+}
+
+// Test that the correct thing happens when no prefs exist for the log manager.
+add_task(async function test_noPrefs() {
+  // tell the log manager to init with a pref branch that doesn't exist.
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "no-such-branch.",
+    logNames: ["TestLog"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog");
+  let [capp, dapp, fapps] = getAppenders(log);
+  // The console appender gets "Fatal" while the "dump" appender gets "Error" levels
+  equal(capp.level, Log.Level.Fatal);
+  equal(dapp.level, Log.Level.Error);
+  // and the file (stream) appender gets Debug by default
+  equal(fapps.length, 1, "only 1 file appender");
+  equal(fapps[0].level, Log.Level.Debug);
+  lm.finalize();
+});
+
+// Test that changes to the prefs used by the log manager are updated dynamically.
+add_task(async function test_PrefChanges() {
+  Services.prefs.setStringPref(
+    "log-manager.test.log.appender.console",
+    "Trace"
+  );
+  Services.prefs.setStringPref("log-manager.test.log.appender.dump", "Trace");
+  Services.prefs.setStringPref(
+    "log-manager.test.log.appender.file.level",
+    "Trace"
+  );
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager.test.",
+    logNames: ["TestLog2"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog2");
+  let [capp, dapp, [fapp]] = getAppenders(log);
+  equal(capp.level, Log.Level.Trace);
+  equal(dapp.level, Log.Level.Trace);
+  equal(fapp.level, Log.Level.Trace);
+  // adjust the prefs and they should magically be reflected in the appenders.
+  Services.prefs.setStringPref(
+    "log-manager.test.log.appender.console",
+    "Debug"
+  );
+  Services.prefs.setStringPref("log-manager.test.log.appender.dump", "Debug");
+  Services.prefs.setStringPref(
+    "log-manager.test.log.appender.file.level",
+    "Debug"
+  );
+  equal(capp.level, Log.Level.Debug);
+  equal(dapp.level, Log.Level.Debug);
+  equal(fapp.level, Log.Level.Debug);
+  // and invalid values should cause them to fallback to their defaults.
+  Services.prefs.setStringPref("log-manager.test.log.appender.console", "xxx");
+  Services.prefs.setStringPref("log-manager.test.log.appender.dump", "xxx");
+  Services.prefs.setStringPref(
+    "log-manager.test.log.appender.file.level",
+    "xxx"
+  );
+  equal(capp.level, Log.Level.Fatal);
+  equal(dapp.level, Log.Level.Error);
+  equal(fapp.level, Log.Level.Debug);
+  lm.finalize();
+});
+
+// Test that the same log used by multiple log managers does the right thing.
+add_task(async function test_SharedLogs() {
+  // create the prefs for the first instance.
+  Services.prefs.setStringPref(
+    "log-manager-1.test.log.appender.console",
+    "Trace"
+  );
+  Services.prefs.setStringPref("log-manager-1.test.log.appender.dump", "Trace");
+  Services.prefs.setStringPref(
+    "log-manager-1.test.log.appender.file.level",
+    "Trace"
+  );
+  let lm1 = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager-1.test.",
+    logNames: ["TestLog3"],
+    logFilePrefix: "test",
+  });
+
+  // and the second.
+  Services.prefs.setStringPref(
+    "log-manager-2.test.log.appender.console",
+    "Debug"
+  );
+  Services.prefs.setStringPref("log-manager-2.test.log.appender.dump", "Debug");
+  Services.prefs.setStringPref(
+    "log-manager-2.test.log.appender.file.level",
+    "Debug"
+  );
+  let lm2 = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager-2.test.",
+    logNames: ["TestLog3"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog3");
+  let [capp, dapp] = getAppenders(log);
+
+  // console and dump appenders should be "trace" as it is more verbose than
+  // "debug"
+  equal(capp.level, Log.Level.Trace);
+  equal(dapp.level, Log.Level.Trace);
+
+  // Set the prefs on the -1 branch to "Error" - it should then end up with
+  // "Debug" from the -2 branch.
+  Services.prefs.setStringPref(
+    "log-manager-1.test.log.appender.console",
+    "Error"
+  );
+  Services.prefs.setStringPref("log-manager-1.test.log.appender.dump", "Error");
+  Services.prefs.setStringPref(
+    "log-manager-1.test.log.appender.file.level",
+    "Error"
+  );
+
+  equal(capp.level, Log.Level.Debug);
+  equal(dapp.level, Log.Level.Debug);
+
+  lm1.finalize();
+  lm2.finalize();
+});
+
+// A little helper to test what log files exist. We expect exactly zero (if
+// prefix is null) or exactly one with the specified prefix.
+function checkLogFile(prefix) {
+  let logsdir = FileUtils.getDir("ProfD", ["weave", "logs"]);
+  let entries = logsdir.directoryEntries;
+  if (!prefix) {
+    // expecting no files.
+    ok(!entries.hasMoreElements());
+  } else {
+    // expecting 1 file.
+    ok(entries.hasMoreElements());
+    let logfile = entries.getNext().QueryInterface(Ci.nsIFile);
+    equal(logfile.leafName.slice(-4), ".txt");
+    ok(logfile.leafName.startsWith(prefix + "-test-"), logfile.leafName);
+    // and remove it ready for the next check.
+    logfile.remove(false);
+  }
+}
+
+// Test that we correctly write error logs by default
+add_task(async function test_logFileErrorDefault() {
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager.test.",
+    logNames: ["TestLog2"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog2");
+  log.error("an error message");
+  await lm.resetFileLog(lm.REASON_ERROR);
+  // One error log file exists.
+  checkLogFile("error");
+
+  lm.finalize();
+});
+
+// Test that we correctly write success logs.
+add_task(async function test_logFileSuccess() {
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnError",
+    false
+  );
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    false
+  );
+
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager.test.",
+    logNames: ["TestLog2"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog2");
+  log.info("an info message");
+  await lm.resetFileLog();
+  // Zero log files exist.
+  checkLogFile(null);
+
+  // Reset logOnSuccess and do it again - log should appear.
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    true
+  );
+  log.info("an info message");
+  await lm.resetFileLog();
+
+  checkLogFile("success");
+
+  // Now test with no "reason" specified and no "error" record.
+  log.info("an info message");
+  await lm.resetFileLog();
+  // should get a "success" entry.
+  checkLogFile("success");
+
+  // With no "reason" and an error record - should get no success log.
+  log.error("an error message");
+  await lm.resetFileLog();
+  // should get no entry
+  checkLogFile(null);
+
+  // And finally now with no error, to ensure that the fact we had an error
+  // previously doesn't persist after the .resetFileLog call.
+  log.info("an info message");
+  await lm.resetFileLog();
+  checkLogFile("success");
+
+  lm.finalize();
+});
+
+// Test that we correctly write error logs.
+add_task(async function test_logFileError() {
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnError",
+    false
+  );
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    false
+  );
+
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager.test.",
+    logNames: ["TestLog2"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog2");
+  log.info("an info message");
+  let reason = await lm.resetFileLog();
+  Assert.equal(reason, null, "null returned when no file created.");
+  // Zero log files exist.
+  checkLogFile(null);
+
+  // Reset logOnSuccess - success logs should appear if no error records.
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    true
+  );
+  log.info("an info message");
+  reason = await lm.resetFileLog();
+  Assert.equal(reason, lm.SUCCESS_LOG_WRITTEN);
+  checkLogFile("success");
+
+  // Set logOnError and unset logOnSuccess - error logs should appear.
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    false
+  );
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnError",
+    true
+  );
+  log.error("an error message");
+  reason = await lm.resetFileLog();
+  Assert.equal(reason, lm.ERROR_LOG_WRITTEN);
+  checkLogFile("error");
+
+  // Now test with no "error" record.
+  log.info("an info message");
+  reason = await lm.resetFileLog();
+  // should get no file
+  Assert.equal(reason, null);
+  checkLogFile(null);
+
+  // With an error record we should get an error log.
+  log.error("an error message");
+  reason = await lm.resetFileLog();
+  // should get an error log
+  Assert.equal(reason, lm.ERROR_LOG_WRITTEN);
+  checkLogFile("error");
+
+  // And finally now with success, to ensure that the fact we had an error
+  // previously doesn't persist after the .resetFileLog call.
+  log.info("an info message");
+  await lm.resetFileLog();
+  checkLogFile(null);
+
+  lm.finalize();
+});
+
+function countLogFiles() {
+  let logsdir = FileUtils.getDir("ProfD", ["weave", "logs"]);
+  let count = 0;
+  for (let entry of logsdir.directoryEntries) {
+    void entry;
+    count += 1;
+  }
+  return count;
+}
+
+// Test that removeAllLogs removes all log files.
+add_task(async function test_removeAllLogs() {
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnError",
+    true
+  );
+  Services.prefs.setBoolPref(
+    "log-manager.test.log.appender.file.logOnSuccess",
+    true
+  );
+
+  let lm = new LogManager({
+    ...logManagerDefaultOptions,
+    prefRoot: "log-manager.test.",
+    logNames: ["TestLog2"],
+    logFilePrefix: "test",
+  });
+
+  let log = Log.repository.getLogger("TestLog2");
+  log.info("an info message");
+  let reason = await lm.resetFileLog();
+  Assert.equal(reason, lm.SUCCESS_LOG_WRITTEN, "success log was written.");
+
+  log.error("an error message");
+  reason = await lm.resetFileLog();
+  Assert.equal(reason, lm.ERROR_LOG_WRITTEN);
+
+  Assert.equal(countLogFiles(), 2, "expect 2 log files");
+  await lm.removeAllLogs();
+  Assert.equal(
+    countLogFiles(),
+    0,
+    "should be no log files after removing them"
+  );
+
+  lm.finalize();
+});
diff --git a/toolkit/modules/tests/xpcshell/test_MatchURLFilters.js b/toolkit/modules/tests/xpcshell/test_MatchURLFilters.js
index e9e8813b77..7aaee5fece 100644
--- a/toolkit/modules/tests/xpcshell/test_MatchURLFilters.js
+++ b/toolkit/modules/tests/xpcshell/test_MatchURLFilters.js
@@ -226,14 +226,14 @@ add_task(async function test_match_url_filters() {
     // TODO: should we explicitly cover hostContains, hostPrefix, hostSuffix for
     // these sub-cases?
     { shouldFail, filters: [{ hostEquals: "blank" }], url: "about:blank" },
-    { shouldFail, filters: [{ hostEquals: "blank" }], url: "about://blank" },
+    { shouldPass, filters: [{ hostEquals: "blank" }], url: "about://blank" },
     {
      shouldFail,
      filters: [{ hostEquals: "testDataURL" }],
      url: "data:,testDataURL",
    },
     { shouldPass, filters: [{ hostEquals: "" }], url: "about:blank" },
-    { shouldPass, filters: [{ hostEquals: "" }], url: "about://blank" },
+    { shouldFail, filters: [{ hostEquals: "" }], url: "about://blank" },
     { shouldPass, filters: [{ hostEquals: "" }], url: "data:,testDataURL" },
 
     // Path filters (pathEquals, pathContains, pathPrefix, pathSuffix).
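The flipped expectations follow from how about://blank parses: the "//" introduces an authority section, so "blank" becomes the host, while plain about:blank is an opaque path with no host at all. The web-platform URL parser shows the same thing (illustrative):

    // Illustrative host extraction for the URLs this test exercises.
    new URL("about://blank").host; // "blank" - "//" starts an authority
    new URL("about:blank").host; // "" - opaque path, no host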
diff --git a/toolkit/modules/tests/xpcshell/test_ProfileAge.js b/toolkit/modules/tests/xpcshell/test_ProfileAge.js
index 9a659a5894..e717b0dcd0 100644
--- a/toolkit/modules/tests/xpcshell/test_ProfileAge.js
+++ b/toolkit/modules/tests/xpcshell/test_ProfileAge.js
@@ -38,6 +38,7 @@ add_task(
   withDummyProfile(async profile => {
     const CREATED_TIME = Date.now() - 2000;
     const RESET_TIME = Date.now() - 1000;
+    const RECOVERY_TIME = Date.now() - 500;
 
     await IOUtils.writeJSON(PathUtils.join(profile, "times.json"), {
       created: CREATED_TIME,
@@ -66,12 +67,21 @@ add_task(
     );
     await promise;
 
+    let recoveryPromise = times.recordRecoveredFromBackup(RECOVERY_TIME);
+    Assert.equal(
+      await times2.recoveredFromBackup,
+      RECOVERY_TIME,
+      "Should have seen the right backup recovery time in the second instance immediately."
+    );
+    await recoveryPromise;
+
     let results = await IOUtils.readJSON(PathUtils.join(profile, "times.json"));
     Assert.deepEqual(
       results,
       {
         created: CREATED_TIME,
         reset: RESET_TIME,
+        recoveredFromBackup: RECOVERY_TIME,
       },
       "Should have seen the right results."
     );
@@ -118,3 +128,24 @@ add_task(
     );
   })
 );
+
+add_task(
+  withDummyProfile(async profile => {
+    const RECOVERY_TIME = Date.now() - 1000;
+    const RECOVERY_TIME2 = Date.now() - 2000;
+
+    // The last call to recordRecoveredFromBackup should always win.
+    let times = await ProfileAge(profile);
+    await Promise.all([
+      times.recordRecoveredFromBackup(RECOVERY_TIME),
+      times.recordRecoveredFromBackup(RECOVERY_TIME2),
+    ]);
+
+    let results = await IOUtils.readJSON(PathUtils.join(profile, "times.json"));
+    Assert.equal(
+      results.recoveredFromBackup,
+      RECOVERY_TIME2,
+      "Should have seen the right results."
+    );
+  })
+);
diff --git a/toolkit/modules/tests/xpcshell/xpcshell.toml b/toolkit/modules/tests/xpcshell/xpcshell.toml
index 52328fc24e..365646c907 100644
--- a/toolkit/modules/tests/xpcshell/xpcshell.toml
+++ b/toolkit/modules/tests/xpcshell/xpcshell.toml
@@ -63,6 +63,8 @@ tags = "remote-settings"
 
 ["test_Log.js"]
 
+["test_LogManager.js"]
+
 ["test_Log_double_ext.js"]
 
 ["test_Log_nsIStackFrame.js"]