author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 01:14:29 +0000
commit     fbaf0bb26397aa498eb9156f06d5a6fe34dd7dd8 (patch)
tree       4c1ccaf5486d4f2009f9a338a98a83e886e29c97  /services/sync
parent     Releasing progress-linux version 124.0.1-1~progress7.99u1. (diff)
Merging upstream version 125.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'services/sync')
-rw-r--r--  services/sync/Weave.sys.mjs | 2
-rw-r--r--  services/sync/docs/engines.rst | 4
-rw-r--r--  services/sync/modules-testing/fakeservices.sys.mjs | 16
-rw-r--r--  services/sync/modules-testing/fxa_utils.sys.mjs | 2
-rw-r--r--  services/sync/modules/UIState.sys.mjs | 2
-rw-r--r--  services/sync/modules/bridged_engine.sys.mjs | 4
-rw-r--r--  services/sync/modules/collection_validator.sys.mjs | 4
-rw-r--r--  services/sync/modules/constants.sys.mjs | 2
-rw-r--r--  services/sync/modules/engines.sys.mjs | 30
-rw-r--r--  services/sync/modules/engines/bookmarks.sys.mjs | 8
-rw-r--r--  services/sync/modules/engines/clients.sys.mjs | 2
-rw-r--r--  services/sync/modules/engines/extension-storage.sys.mjs | 4
-rw-r--r--  services/sync/modules/engines/forms.sys.mjs | 2
-rw-r--r--  services/sync/modules/engines/prefs.sys.mjs | 6
-rw-r--r--  services/sync/modules/engines/tabs.sys.mjs | 4
-rw-r--r--  services/sync/modules/record.sys.mjs | 6
-rw-r--r--  services/sync/modules/sync_auth.sys.mjs | 35
-rw-r--r--  services/sync/modules/telemetry.sys.mjs | 4
-rw-r--r--  services/sync/tests/tps/.eslintrc.js | 2
-rw-r--r--  services/sync/tests/unit/head_helpers.js | 4
-rw-r--r--  services/sync/tests/unit/head_http_server.js | 6
-rw-r--r--  services/sync/tests/unit/test_addon_utils.js | 2
-rw-r--r--  services/sync/tests/unit/test_addons_validator.js | 2
-rw-r--r--  services/sync/tests/unit/test_bookmark_engine.js | 2
-rw-r--r--  services/sync/tests/unit/test_bookmark_tracker.js | 14
-rw-r--r--  services/sync/tests/unit/test_clients_engine.js | 27
-rw-r--r--  services/sync/tests/unit/test_declined.js | 4
-rw-r--r--  services/sync/tests/unit/test_engine_abort.js | 2
-rw-r--r--  services/sync/tests/unit/test_errorhandler_1.js | 22
-rw-r--r--  services/sync/tests/unit/test_errorhandler_2.js | 77
-rw-r--r--  services/sync/tests/unit/test_errorhandler_filelog.js | 2
-rw-r--r--  services/sync/tests/unit/test_fxa_node_reassignment.js | 2
-rw-r--r--  services/sync/tests/unit/test_history_engine.js | 4
-rw-r--r--  services/sync/tests/unit/test_history_store.js | 2
-rw-r--r--  services/sync/tests/unit/test_history_tracker.js | 4
-rw-r--r--  services/sync/tests/unit/test_hmac_error.js | 2
-rw-r--r--  services/sync/tests/unit/test_httpd_sync_server.js | 2
-rw-r--r--  services/sync/tests/unit/test_interval_triggers.js | 2
-rw-r--r--  services/sync/tests/unit/test_password_engine.js | 9
-rw-r--r--  services/sync/tests/unit/test_resource.js | 2
-rw-r--r--  services/sync/tests/unit/test_service_sync_401.js | 2
-rw-r--r--  services/sync/tests/unit/test_service_verifyLogin.js | 11
-rw-r--r--  services/sync/tests/unit/test_sync_auth_manager.js | 35
-rw-r--r--  services/sync/tests/unit/test_syncedtabs.js | 2
-rw-r--r--  services/sync/tests/unit/test_syncscheduler.js | 2
-rw-r--r--  services/sync/tests/unit/test_tab_quickwrite.js | 2
-rw-r--r--  services/sync/tests/unit/test_telemetry.js | 4
-rw-r--r--  services/sync/tests/unit/test_uistate.js | 4
-rw-r--r--  services/sync/tps/extensions/tps/resource/modules/bookmarkValidator.sys.mjs | 2
-rw-r--r--  services/sync/tps/extensions/tps/resource/modules/tabs.sys.mjs | 2
-rw-r--r--  services/sync/tps/extensions/tps/resource/modules/windows.sys.mjs | 2
-rw-r--r--  services/sync/tps/extensions/tps/resource/tps.sys.mjs | 2
52 files changed, 194 insertions, 208 deletions
diff --git a/services/sync/Weave.sys.mjs b/services/sync/Weave.sys.mjs
index 05a7031a73..1c4bb44928 100644
--- a/services/sync/Weave.sys.mjs
+++ b/services/sync/Weave.sys.mjs
@@ -156,7 +156,7 @@ AboutWeaveLog.prototype = {
"nsISupportsWeakReference",
]),
- getURIFlags(aURI) {
+ getURIFlags() {
return 0;
},
diff --git a/services/sync/docs/engines.rst b/services/sync/docs/engines.rst
index 7a4fa721af..a9a60d4e79 100644
--- a/services/sync/docs/engines.rst
+++ b/services/sync/docs/engines.rst
@@ -80,7 +80,7 @@ are open on other devices. There's no database - if we haven't synced yet we
don't know what other tabs are open, and when we do know, the list is just
stored in memory.
-The `SyncedTabs module <https://searchfox.org/mozilla-central/source/services/sync/modules/SyncedTabs.jsm>`_
+The `SyncedTabs module <https://searchfox.org/mozilla-central/source/services/sync/modules/SyncedTabs.sys.mjs>`_
is the main interface the browser uses to get the list of tabs from other
devices.
@@ -111,7 +111,7 @@ treat them as a single engine in practice.
As a result, only a shim is in the `services/sync/modules/engines/` directory,
while the actual logic is
-`next to the storage implementation <https://searchfox.org/mozilla-central/source/toolkit/components/formautofill/FormAutofillSync.jsm>`_.
+`next to the storage implementation <https://searchfox.org/mozilla-central/source/toolkit/components/formautofill/FormAutofillSync.sys.mjs>`_.
This engine has a unique twist on the "mirror" concept described above -
whenever a change is made to a fields, the original value of the field is
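
Note on the hunk above: engines.rst describes SyncedTabs.sys.mjs as the browser's main interface for reading tabs synced from other devices. A minimal usage sketch, assuming the getTabClients() helper and the resource://services-sync/ module registration (neither is introduced by this patch):

// Hedged sketch: list remote clients and the URLs of their open tabs.
// getTabClients() and the client/tab field names are assumptions.
const { SyncedTabs } = ChromeUtils.importESModule(
  "resource://services-sync/SyncedTabs.sys.mjs"
);
const clients = await SyncedTabs.getTabClients();
for (const client of clients) {
  console.log(client.name, client.tabs.map(tab => tab.url));
}
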
diff --git a/services/sync/modules-testing/fakeservices.sys.mjs b/services/sync/modules-testing/fakeservices.sys.mjs
index 4fd7534bf1..bf2d143203 100644
--- a/services/sync/modules-testing/fakeservices.sys.mjs
+++ b/services/sync/modules-testing/fakeservices.sys.mjs
@@ -29,7 +29,7 @@ export function FakeFilesystemService(contents) {
self.fakeContents["weave/" + filePath + ".json"] = JSON.stringify(json);
};
- Utils.jsonLoad = async function jsonLoad(filePath, that) {
+ Utils.jsonLoad = async function jsonLoad(filePath) {
let obj;
let json = self.fakeContents["weave/" + filePath + ".json"];
if (json) {
@@ -38,14 +38,14 @@ export function FakeFilesystemService(contents) {
return obj;
};
- Utils.jsonMove = function jsonMove(aFrom, aTo, that) {
+ Utils.jsonMove = function jsonMove(aFrom, aTo) {
const fromPath = "weave/" + aFrom + ".json";
self.fakeContents["weave/" + aTo + ".json"] = self.fakeContents[fromPath];
delete self.fakeContents[fromPath];
return Promise.resolve();
};
- Utils.jsonRemove = function jsonRemove(filePath, that) {
+ Utils.jsonRemove = function jsonRemove(filePath) {
delete self.fakeContents["weave/" + filePath + ".json"];
return Promise.resolve();
};
@@ -79,19 +79,17 @@ export function FakeCryptoService() {
delete Weave.Crypto; // get rid of the getter first
Weave.Crypto = this;
- RawCryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC(
- keyBundle
- ) {
+ RawCryptoWrapper.prototype.ciphertextHMAC = function ciphertextHMAC() {
return fakeSHA256HMAC(this.ciphertext);
};
}
FakeCryptoService.prototype = {
- async encrypt(clearText, symmetricKey, iv) {
+ async encrypt(clearText) {
return clearText;
},
- async decrypt(cipherText, symmetricKey, iv) {
+ async decrypt(cipherText) {
return cipherText;
},
@@ -104,7 +102,7 @@ FakeCryptoService.prototype = {
return btoa("fake-fake-fake-random-iv");
},
- expandData: function expandData(data, len) {
+ expandData: function expandData(data) {
return data;
},
diff --git a/services/sync/modules-testing/fxa_utils.sys.mjs b/services/sync/modules-testing/fxa_utils.sys.mjs
index c953f0eaa3..fdb9e261c9 100644
--- a/services/sync/modules-testing/fxa_utils.sys.mjs
+++ b/services/sync/modules-testing/fxa_utils.sys.mjs
@@ -23,7 +23,7 @@ export var initializeIdentityWithTokenServerResponse = function (response) {
}
// A mock request object.
- function MockRESTRequest(url) {}
+ function MockRESTRequest() {}
MockRESTRequest.prototype = {
_log: requestLog,
setHeader() {},
diff --git a/services/sync/modules/UIState.sys.mjs b/services/sync/modules/UIState.sys.mjs
index 8981d81f7d..6a45130cb1 100644
--- a/services/sync/modules/UIState.sys.mjs
+++ b/services/sync/modules/UIState.sys.mjs
@@ -87,7 +87,7 @@ const UIStateInternal = {
this._initialized = false;
},
- observe(subject, topic, data) {
+ observe(subject, topic) {
switch (topic) {
case "weave:service:sync:start":
this.toggleSyncActivity(true);
diff --git a/services/sync/modules/bridged_engine.sys.mjs b/services/sync/modules/bridged_engine.sys.mjs
index 45e5f685cd..3e40a80505 100644
--- a/services/sync/modules/bridged_engine.sys.mjs
+++ b/services/sync/modules/bridged_engine.sys.mjs
@@ -43,7 +43,7 @@ class BridgedStore {
this._batchChunkSize = 500;
}
- async applyIncomingBatch(records, countTelemetry) {
+ async applyIncomingBatch(records) {
for (let chunk of lazy.PlacesUtils.chunkArray(
records,
this._batchChunkSize
@@ -145,7 +145,7 @@ class InterruptedError extends Error {
/**
* Adapts a `Log.sys.mjs` logger to a `mozIServicesLogSink`. This class is copied
- * from `SyncedBookmarksMirror.jsm`.
+ * from `SyncedBookmarksMirror.sys.mjs`.
*/
export class LogAdapter {
constructor(log) {
diff --git a/services/sync/modules/collection_validator.sys.mjs b/services/sync/modules/collection_validator.sys.mjs
index a64ede10e9..1f40110ca9 100644
--- a/services/sync/modules/collection_validator.sys.mjs
+++ b/services/sync/modules/collection_validator.sys.mjs
@@ -114,13 +114,13 @@ export class CollectionValidator {
// Return whether or not a server item should be present on the client. Expected
// to be overridden.
- clientUnderstands(item) {
+ clientUnderstands() {
return true;
}
// Return whether or not a client item should be present on the server. Expected
// to be overridden
- async syncedByClient(item) {
+ async syncedByClient() {
return true;
}
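
The comments in this hunk mark clientUnderstands() and syncedByClient() as the intended override points for CollectionValidator subclasses. A minimal sketch of such a subclass; the constructor arguments and the item fields are assumptions made for illustration:

class FormsValidator extends CollectionValidator {
  constructor() {
    // Assumed CollectionValidator(name, idProp, props) signature.
    super("forms", "guid", ["name", "value"]);
  }
  clientUnderstands(item) {
    // Example: only consider server records that carry a usable name.
    return !!item.name;
  }
  async syncedByClient(item) {
    // Example: deleted client items are not expected on the server.
    return !item.deleted;
  }
}
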
diff --git a/services/sync/modules/constants.sys.mjs b/services/sync/modules/constants.sys.mjs
index 35c0ac2f0b..958e3345e6 100644
--- a/services/sync/modules/constants.sys.mjs
+++ b/services/sync/modules/constants.sys.mjs
@@ -4,7 +4,7 @@
// Don't manually modify this line, as it is automatically replaced on merge day
// by the gecko_migration.py script.
-export const WEAVE_VERSION = "1.126.0";
+export const WEAVE_VERSION = "1.127.0";
// Sync Server API version that the client supports.
export const SYNC_API_VERSION = "1.5";
diff --git a/services/sync/modules/engines.sys.mjs b/services/sync/modules/engines.sys.mjs
index 0d490ac4b3..63b4c02cc5 100644
--- a/services/sync/modules/engines.sys.mjs
+++ b/services/sync/modules/engines.sys.mjs
@@ -113,12 +113,12 @@ Tracker.prototype = {
},
// Also unsupported.
- async addChangedID(id, when) {
+ async addChangedID() {
throw new TypeError("Can't add changed ID to this tracker");
},
// Ditto.
- async removeChangedID(...ids) {
+ async removeChangedID() {
throw new TypeError("Can't remove changed IDs from this tracker");
},
@@ -155,7 +155,7 @@ Tracker.prototype = {
// Override these in your subclasses.
onStart() {},
onStop() {},
- async observe(subject, topic, data) {},
+ async observe() {},
engineIsEnabled() {
if (!this.engine) {
@@ -437,7 +437,7 @@ Store.prototype = {
* @param record
* The store record to create an item from
*/
- async create(record) {
+ async create() {
throw new Error("override create in a subclass");
},
@@ -450,7 +450,7 @@ Store.prototype = {
* @param record
* The store record to delete an item from
*/
- async remove(record) {
+ async remove() {
throw new Error("override remove in a subclass");
},
@@ -463,7 +463,7 @@ Store.prototype = {
* @param record
* The record to use to update an item from
*/
- async update(record) {
+ async update() {
throw new Error("override update in a subclass");
},
@@ -477,7 +477,7 @@ Store.prototype = {
* string record ID
* @return boolean indicating whether record exists locally
*/
- async itemExists(id) {
+ async itemExists() {
throw new Error("override itemExists in a subclass");
},
@@ -495,7 +495,7 @@ Store.prototype = {
* constructor for the newly-created record.
* @return record type for this engine
*/
- async createRecord(id, collection) {
+ async createRecord() {
throw new Error("override createRecord in a subclass");
},
@@ -507,7 +507,7 @@ Store.prototype = {
* @param newID
* string new record ID
*/
- async changeItemID(oldID, newID) {
+ async changeItemID() {
throw new Error("override changeItemID in a subclass");
},
@@ -1040,7 +1040,7 @@ SyncEngine.prototype = {
* Note: Overriding engines must take resyncs into account -- score will not
* be cleared.
*/
- shouldSkipSync(syncReason) {
+ shouldSkipSync() {
return false;
},
@@ -1550,7 +1550,7 @@ SyncEngine.prototype = {
// Indicates whether an incoming item should be deleted from the server at
// the end of the sync. Engines can override this method to clean up records
// that shouldn't be on the server.
- _shouldDeleteRemotely(remoteItem) {
+ _shouldDeleteRemotely() {
return false;
},
@@ -1560,7 +1560,7 @@ SyncEngine.prototype = {
*
* @return GUID of the similar item; falsy otherwise
*/
- async _findDupe(item) {
+ async _findDupe() {
// By default, assume there's no dupe items for the engine
},
@@ -1568,7 +1568,7 @@ SyncEngine.prototype = {
* Called before a remote record is discarded due to failed reconciliation.
* Used by bookmark sync to merge folder child orders.
*/
- beforeRecordDiscard(localRecord, remoteRecord, remoteIsNewer) {},
+ beforeRecordDiscard() {},
// Called when the server has a record marked as deleted, but locally we've
// changed it more recently than the deletion. If we return false, the
@@ -1576,7 +1576,7 @@ SyncEngine.prototype = {
// record to the server -- any extra work that's needed as part of this
// process should be done at this point (such as mark the record's parent
// for reuploading in the case of bookmarks).
- async _shouldReviveRemotelyDeletedRecord(remoteItem) {
+ async _shouldReviveRemotelyDeletedRecord() {
return true;
},
@@ -1948,7 +1948,7 @@ SyncEngine.prototype = {
}
},
- async _onRecordsWritten(succeeded, failed, serverModifiedTime) {
+ async _onRecordsWritten() {
// Implement this method to take specific actions against successfully
// uploaded records and failed records.
},
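
The engines.sys.mjs hunks above drop unused parameters from the abstract Store methods, whose comments say to override them in a subclass. A condensed sketch of that contract, using the Object.setPrototypeOf chaining seen elsewhere in this patch; NoteStore, its in-memory map, and the Store/CryptoWrapper constructor shapes are assumptions:

function NoteStore(name, engine) {
  Store.call(this, name, engine);
  this._notes = new Map(); // id -> title, illustrative storage only
}
NoteStore.prototype = {
  async itemExists(id) {
    return this._notes.has(id);
  },
  async create(record) {
    this._notes.set(record.id, record.title);
  },
  async update(record) {
    this._notes.set(record.id, record.title);
  },
  async remove(record) {
    this._notes.delete(record.id);
  },
  async changeItemID(oldID, newID) {
    this._notes.set(newID, this._notes.get(oldID));
    this._notes.delete(oldID);
  },
  async createRecord(id, collection) {
    let record = new CryptoWrapper(collection, id);
    record.title = this._notes.get(id);
    return record;
  },
};
Object.setPrototypeOf(NoteStore.prototype, Store.prototype);
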
diff --git a/services/sync/modules/engines/bookmarks.sys.mjs b/services/sync/modules/engines/bookmarks.sys.mjs
index 3c1396f67d..4995da6899 100644
--- a/services/sync/modules/engines/bookmarks.sys.mjs
+++ b/services/sync/modules/engines/bookmarks.sys.mjs
@@ -513,7 +513,7 @@ BookmarksEngine.prototype = {
await this._apply();
},
- async _reconcile(item) {
+ async _reconcile() {
return true;
},
@@ -752,7 +752,7 @@ BookmarksStore.prototype = {
});
},
- async applyIncomingBatch(records, countTelemetry) {
+ async applyIncomingBatch(records) {
let buf = await this.ensureOpenMirror();
for (let chunk of lazy.PlacesUtils.chunkArray(
records,
@@ -921,11 +921,11 @@ Object.setPrototypeOf(BookmarksTracker.prototype, Tracker.prototype);
class BookmarksChangeset extends Changeset {
// Only `_reconcile` calls `getModifiedTimestamp` and `has`, and the engine
// does its own reconciliation.
- getModifiedTimestamp(id) {
+ getModifiedTimestamp() {
throw new Error("Don't use timestamps to resolve bookmark conflicts");
}
- has(id) {
+ has() {
throw new Error("Don't use the changeset to resolve bookmark conflicts");
}
diff --git a/services/sync/modules/engines/clients.sys.mjs b/services/sync/modules/engines/clients.sys.mjs
index eda92bd75b..cb391982e0 100644
--- a/services/sync/modules/engines/clients.sys.mjs
+++ b/services/sync/modules/engines/clients.sys.mjs
@@ -1107,7 +1107,7 @@ ClientsTracker.prototype = {
Svc.Obs.remove("fxaccounts:new_device_id", this.asyncObserver);
},
- async observe(subject, topic, data) {
+ async observe(subject, topic) {
switch (topic) {
case "nsPref:changed":
this._log.debug("client.name preference changed");
diff --git a/services/sync/modules/engines/extension-storage.sys.mjs b/services/sync/modules/engines/extension-storage.sys.mjs
index d2671978c8..693d94f647 100644
--- a/services/sync/modules/engines/extension-storage.sys.mjs
+++ b/services/sync/modules/engines/extension-storage.sys.mjs
@@ -124,7 +124,7 @@ ExtensionStorageEngineBridge.prototype = {
},
_takeMigrationInfo() {
- return new Promise((resolve, reject) => {
+ return new Promise(resolve => {
this.component
.QueryInterface(Ci.mozIExtensionStorageArea)
.takeMigrationInfo({
@@ -291,7 +291,7 @@ ExtensionStorageTracker.prototype = {
lazy.Svc.Obs.remove("ext.storage.sync-changed", this.asyncObserver);
},
- async observe(subject, topic, data) {
+ async observe(subject, topic) {
if (this.ignoreAll) {
return;
}
diff --git a/services/sync/modules/engines/forms.sys.mjs b/services/sync/modules/engines/forms.sys.mjs
index 3516327659..0d63eb96d1 100644
--- a/services/sync/modules/engines/forms.sys.mjs
+++ b/services/sync/modules/engines/forms.sys.mjs
@@ -189,7 +189,7 @@ FormStore.prototype = {
await this._processChange(change);
},
- async update(record) {
+ async update() {
this._log.trace("Ignoring form record update request!");
},
diff --git a/services/sync/modules/engines/prefs.sys.mjs b/services/sync/modules/engines/prefs.sys.mjs
index f29a9e7b59..cb494ec70e 100644
--- a/services/sync/modules/engines/prefs.sys.mjs
+++ b/services/sync/modules/engines/prefs.sys.mjs
@@ -386,7 +386,7 @@ PrefStore.prototype = {
return allprefs;
},
- async changeItemID(oldID, newID) {
+ async changeItemID() {
this._log.trace("PrefStore GUID is constant!");
},
@@ -406,11 +406,11 @@ PrefStore.prototype = {
return record;
},
- async create(record) {
+ async create() {
this._log.trace("Ignoring create request");
},
- async remove(record) {
+ async remove() {
this._log.trace("Ignoring remove request");
},
diff --git a/services/sync/modules/engines/tabs.sys.mjs b/services/sync/modules/engines/tabs.sys.mjs
index 861e051d1a..93747665f2 100644
--- a/services/sync/modules/engines/tabs.sys.mjs
+++ b/services/sync/modules/engines/tabs.sys.mjs
@@ -430,7 +430,7 @@ export const TabProvider = {
.then(iconData => {
thisTab.icon = iconData.uri.spec;
})
- .catch(ex => {
+ .catch(() => {
log.trace(
`Failed to fetch favicon for ${url}`,
thisTab.urlHistory[0]
@@ -503,7 +503,7 @@ TabTracker.prototype = {
}
},
- async observe(subject, topic, data) {
+ async observe(subject, topic) {
switch (topic) {
case "domwindowopened":
let onLoad = () => {
diff --git a/services/sync/modules/record.sys.mjs b/services/sync/modules/record.sys.mjs
index 7d5918a8ca..f8580cfbd4 100644
--- a/services/sync/modules/record.sys.mjs
+++ b/services/sync/modules/record.sys.mjs
@@ -182,7 +182,7 @@ RawCryptoWrapper.prototype = {
* @param {Cleartext} outgoingCleartext The cleartext to upload.
* @returns {String} The serialized cleartext.
*/
- transformBeforeEncrypt(outgoingCleartext) {
+ transformBeforeEncrypt() {
throw new TypeError("Override to stringify outgoing records");
},
@@ -194,7 +194,7 @@ RawCryptoWrapper.prototype = {
* @param {String} incomingCleartext The decrypted cleartext string.
* @returns {Cleartext} The parsed cleartext.
*/
- transformAfterDecrypt(incomingCleartext) {
+ transformAfterDecrypt() {
throw new TypeError("Override to parse incoming records");
},
@@ -527,7 +527,7 @@ CollectionKeyManager.prototype = {
/**
* Create a WBO for the current keys.
*/
- asWBO(collection, id) {
+ asWBO() {
return this._makeWBO(this._collections, this._default);
},
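
RawCryptoWrapper's transformBeforeEncrypt/transformAfterDecrypt hooks above throw until a subclass supplies serialization. A small sketch of the intended shape; the JSON round-trip and the class name are illustrative assumptions:

class JSONRecord extends RawCryptoWrapper {
  transformBeforeEncrypt(outgoingCleartext) {
    // Stringify outgoing cleartext before encryption.
    return JSON.stringify(outgoingCleartext);
  }
  transformAfterDecrypt(incomingCleartext) {
    // Parse the decrypted string back into an object.
    return JSON.parse(incomingCleartext);
  }
}
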
diff --git a/services/sync/modules/sync_auth.sys.mjs b/services/sync/modules/sync_auth.sys.mjs
index 6b8da4061c..cfa76827d5 100644
--- a/services/sync/modules/sync_auth.sys.mjs
+++ b/services/sync/modules/sync_auth.sys.mjs
@@ -164,7 +164,7 @@ SyncAuthManager.prototype = {
this._token = null;
},
- async observe(subject, topic, data) {
+ async observe(subject, topic) {
this._log.debug("observed " + topic);
if (!this.username) {
this._log.info("Sync is not configured, so ignoring the notification");
@@ -276,7 +276,7 @@ SyncAuthManager.prototype = {
* allows us to avoid a network request for when we actually need the
* migration info.
*/
- prefetchMigrationSentinel(service) {
+ prefetchMigrationSentinel() {
// nothing to do here until we decide to migrate away from FxA.
},
@@ -387,22 +387,28 @@ SyncAuthManager.prototype = {
// Do the token dance, with a retry in case of transient auth failure.
// We need to prove that we know the sync key in order to get a token
// from the tokenserver.
- let getToken = async key => {
+ let getToken = async (key, accessToken) => {
this._log.info("Getting a sync token from", this._tokenServerUrl);
- let token = await this._fetchTokenUsingOAuth(key);
+ let token = await this._fetchTokenUsingOAuth(key, accessToken);
this._log.trace("Successfully got a token");
return token;
};
+ const ttl = fxAccountsCommon.OAUTH_TOKEN_FOR_SYNC_LIFETIME_SECONDS;
try {
let token, key;
try {
this._log.info("Getting sync key");
- key = await fxa.keys.getKeyForScope(SCOPE_OLD_SYNC);
+ const tokenAndKey = await fxa.getOAuthTokenAndKey({
+ scope: SCOPE_OLD_SYNC,
+ ttl,
+ });
+
+ key = tokenAndKey.key;
if (!key) {
throw new Error("browser does not have the sync key, cannot sync");
}
- token = await getToken(key);
+ token = await getToken(key, tokenAndKey.token);
} catch (err) {
// If we get a 401 fetching the token it may be that our auth tokens needed
// to be regenerated; retry exactly once.
@@ -412,8 +418,11 @@ SyncAuthManager.prototype = {
this._log.warn(
"Token server returned 401, retrying token fetch with fresh credentials"
);
- key = await fxa.keys.getKeyForScope(SCOPE_OLD_SYNC);
- token = await getToken(key);
+ const tokenAndKey = await fxa.getOAuthTokenAndKey({
+ scope: SCOPE_OLD_SYNC,
+ ttl,
+ });
+ token = await getToken(tokenAndKey.key, tokenAndKey.token);
}
// TODO: Make it be only 80% of the duration, so refresh the token
// before it actually expires. This is to avoid sync storage errors
@@ -437,7 +446,7 @@ SyncAuthManager.prototype = {
// A hawkclient error.
} else if (err.code && err.code === 401) {
err = new AuthenticationError(err, "hawkclient");
- // An FxAccounts.jsm error.
+ // An FxAccounts.sys.mjs error.
} else if (err.message == fxAccountsCommon.ERROR_AUTH_ERROR) {
err = new AuthenticationError(err, "fxaccounts");
}
@@ -460,17 +469,13 @@ SyncAuthManager.prototype = {
},
/**
- * Generates an OAuth access_token using the OLD_SYNC scope and exchanges it
- * for a TokenServer token.
- *
+ * Exchanges an OAuth access_token for a TokenServer token.
* @returns {Promise}
* @private
*/
- async _fetchTokenUsingOAuth(key) {
+ async _fetchTokenUsingOAuth(key, accessToken) {
this._log.debug("Getting a token using OAuth");
const fxa = this._fxaService;
- const ttl = fxAccountsCommon.OAUTH_TOKEN_FOR_SYNC_LIFETIME_SECONDS;
- const accessToken = await fxa.getOAuthToken({ scope: SCOPE_OLD_SYNC, ttl });
const headers = {
"X-KeyId": key.kid,
};
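
The sync_auth.sys.mjs change above folds the separate key and access-token fetches into one fxa.getOAuthTokenAndKey() call whose result feeds _fetchTokenUsingOAuth(). Condensed from the hunks, the new flow is roughly (401 retry and error translation omitted):

const ttl = fxAccountsCommon.OAUTH_TOKEN_FOR_SYNC_LIFETIME_SECONDS;
const { key, token: accessToken } = await fxa.getOAuthTokenAndKey({
  scope: SCOPE_OLD_SYNC,
  ttl,
});
if (!key) {
  throw new Error("browser does not have the sync key, cannot sync");
}
// The access token is now passed in instead of being minted inside the helper.
const tokenServerToken = await this._fetchTokenUsingOAuth(key, accessToken);
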
diff --git a/services/sync/modules/telemetry.sys.mjs b/services/sync/modules/telemetry.sys.mjs
index c08f405b0e..28888ef277 100644
--- a/services/sync/modules/telemetry.sys.mjs
+++ b/services/sync/modules/telemetry.sys.mjs
@@ -241,10 +241,14 @@ export class ErrorSanitizer {
NotAllowedError: this.E_PERMISSION_DENIED,
};
+ // IOUtils error messages include the specific nsresult error code that caused them.
+ static NS_ERROR_RE = new RegExp(/ \(NS_ERROR_.*\)$/);
+
static #cleanOSErrorMessage(message, error = undefined) {
if (DOMException.isInstance(error)) {
const sub = this.DOMErrorSubstitutions[error.name];
message = message.replaceAll("\\", "/");
+ message = message.replace(this.NS_ERROR_RE, "");
if (sub) {
return `${sub} ${message}`;
}
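
The telemetry.sys.mjs hunk strips the trailing NS_ERROR_* code that IOUtils appends to its error messages before the message is sanitized. Roughly (the sample string mirrors the expectation updated in test_telemetry.js later in this patch):

const NS_ERROR_RE = new RegExp(/ \(NS_ERROR_.*\)$/);
const raw =
  "Could not open `[profileDir]/no/such/path.json': file does not exist (NS_ERROR_FILE_NOT_FOUND)";
console.log(raw.replace(NS_ERROR_RE, ""));
// -> "Could not open `[profileDir]/no/such/path.json': file does not exist"
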
diff --git a/services/sync/tests/tps/.eslintrc.js b/services/sync/tests/tps/.eslintrc.js
index 182e87933b..63c8344934 100644
--- a/services/sync/tests/tps/.eslintrc.js
+++ b/services/sync/tests/tps/.eslintrc.js
@@ -2,7 +2,7 @@
module.exports = {
globals: {
- // Injected into tests via tps.jsm
+ // Injected into tests via tps.sys.mjs
Addons: false,
Addresses: false,
Bookmarks: false,
diff --git a/services/sync/tests/unit/head_helpers.js b/services/sync/tests/unit/head_helpers.js
index e79e55e57f..865117e5d2 100644
--- a/services/sync/tests/unit/head_helpers.js
+++ b/services/sync/tests/unit/head_helpers.js
@@ -530,8 +530,8 @@ async function sync_engine_and_validate_telem(
// Returns a promise that resolves once the specified observer notification
// has fired.
-function promiseOneObserver(topic, callback) {
- return new Promise((resolve, reject) => {
+function promiseOneObserver(topic) {
+ return new Promise(resolve => {
let observer = function (subject, data) {
Svc.Obs.remove(topic, observer);
resolve({ subject, data });
diff --git a/services/sync/tests/unit/head_http_server.js b/services/sync/tests/unit/head_http_server.js
index 84dbb33951..d8603465c1 100644
--- a/services/sync/tests/unit/head_http_server.js
+++ b/services/sync/tests/unit/head_http_server.js
@@ -687,8 +687,8 @@ function track_collections_helper() {
* prototype, and override as appropriate.
*/
var SyncServerCallback = {
- onCollectionDeleted: function onCollectionDeleted(user, collection) {},
- onItemDeleted: function onItemDeleted(user, collection, wboID) {},
+ onCollectionDeleted: function onCollectionDeleted() {},
+ onItemDeleted: function onItemDeleted() {},
/**
* Called at the top of every request.
@@ -699,7 +699,7 @@ var SyncServerCallback = {
* must be taken to not screw with the response body or headers that may
* conflict with normal operation of this server.
*/
- onRequest: function onRequest(request, response) {},
+ onRequest: function onRequest() {},
};
/**
diff --git a/services/sync/tests/unit/test_addon_utils.js b/services/sync/tests/unit/test_addon_utils.js
index c039bee16c..e9e49fb9ea 100644
--- a/services/sync/tests/unit/test_addon_utils.js
+++ b/services/sync/tests/unit/test_addon_utils.js
@@ -119,7 +119,7 @@ add_task(async function test_source_uri_rewrite() {
let installCalled = false;
Object.getPrototypeOf(AddonUtils).installAddonFromSearchResult =
- async function testInstallAddon(addon, metadata) {
+ async function testInstallAddon(addon) {
Assert.equal(
SERVER_ADDRESS + "/require.xpi?src=sync",
addon.sourceURI.spec
diff --git a/services/sync/tests/unit/test_addons_validator.js b/services/sync/tests/unit/test_addons_validator.js
index 60f2f8bf43..91f3f7b31b 100644
--- a/services/sync/tests/unit/test_addons_validator.js
+++ b/services/sync/tests/unit/test_addons_validator.js
@@ -49,7 +49,7 @@ function getDummyServerAndClient() {
add_task(async function test_valid() {
let { server, client } = getDummyServerAndClient();
let validator = new AddonValidator({
- _findDupe(item) {
+ _findDupe() {
return null;
},
isAddonSyncable(item) {
diff --git a/services/sync/tests/unit/test_bookmark_engine.js b/services/sync/tests/unit/test_bookmark_engine.js
index 6274a6b836..2f5ac9dcd3 100644
--- a/services/sync/tests/unit/test_bookmark_engine.js
+++ b/services/sync/tests/unit/test_bookmark_engine.js
@@ -940,7 +940,7 @@ add_bookmark_test(async function test_sync_dateAdded(engine) {
// Make sure it's within 24 hours of the right timestamp... This is a little
// dodgey but we only really care that it's basically accurate and has the
// right day.
- ok(Math.abs(Date.now() - record3.dateAdded) < 24 * 60 * 60 * 1000);
+ Assert.less(Math.abs(Date.now() - record3.dateAdded), 24 * 60 * 60 * 1000);
let record4 = await store.createRecord(item4GUID);
equal(
diff --git a/services/sync/tests/unit/test_bookmark_tracker.js b/services/sync/tests/unit/test_bookmark_tracker.js
index 9cfbb4de78..8c26232cd5 100644
--- a/services/sync/tests/unit/test_bookmark_tracker.js
+++ b/services/sync/tests/unit/test_bookmark_tracker.js
@@ -54,8 +54,16 @@ async function verifyTrackedItems(tracked) {
let trackedIDs = new Set(Object.keys(changedIDs));
for (let guid of tracked) {
ok(guid in changedIDs, `${guid} should be tracked`);
- ok(changedIDs[guid].modified > 0, `${guid} should have a modified time`);
- ok(changedIDs[guid].counter >= -1, `${guid} should have a change counter`);
+ Assert.greater(
+ changedIDs[guid].modified,
+ 0,
+ `${guid} should have a modified time`
+ );
+ Assert.greaterOrEqual(
+ changedIDs[guid].counter,
+ -1,
+ `${guid} should have a change counter`
+ );
trackedIDs.delete(guid);
}
equal(
@@ -770,7 +778,7 @@ add_task(async function test_onFaviconChanged() {
iconURI,
true,
PlacesUtils.favicons.FAVICON_LOAD_NON_PRIVATE,
- (uri, dataLen, data, mimeType) => {
+ () => {
resolve();
},
Services.scriptSecurityManager.getSystemPrincipal()
diff --git a/services/sync/tests/unit/test_clients_engine.js b/services/sync/tests/unit/test_clients_engine.js
index d910a67503..9a5115db4e 100644
--- a/services/sync/tests/unit/test_clients_engine.js
+++ b/services/sync/tests/unit/test_clients_engine.js
@@ -125,7 +125,7 @@ add_task(async function test_bad_hmac() {
check_clients_count(0);
await syncClientsEngine(server);
check_clients_count(1);
- ok(engine.lastRecordUpload > 0);
+ Assert.greater(engine.lastRecordUpload, 0);
ok(!engine.isFirstSync);
// Our uploaded record has a version.
@@ -275,7 +275,7 @@ add_task(async function test_full_sync() {
strictEqual(engine.lastRecordUpload, 0);
ok(engine.isFirstSync);
await syncClientsEngine(server);
- ok(engine.lastRecordUpload > 0);
+ Assert.greater(engine.lastRecordUpload, 0);
ok(!engine.isFirstSync);
deepEqual(
user.collection("clients").keys().sort(),
@@ -333,7 +333,7 @@ add_task(async function test_sync() {
ok(engine.isFirstSync);
await syncClientsEngine(server);
ok(!!clientWBO().payload);
- ok(engine.lastRecordUpload > 0);
+ Assert.greater(engine.lastRecordUpload, 0);
ok(!engine.isFirstSync);
_(
@@ -344,7 +344,7 @@ add_task(async function test_sync() {
clientWBO().payload = undefined;
await syncClientsEngine(server);
ok(!!clientWBO().payload);
- ok(engine.lastRecordUpload > lastweek);
+ Assert.greater(engine.lastRecordUpload, lastweek);
ok(!engine.isFirstSync);
_("Remove client record.");
@@ -394,8 +394,8 @@ add_task(async function test_client_name_change() {
changedIDs = await tracker.getChangedIDs();
equal(Object.keys(changedIDs).length, 1);
ok(engine.localID in changedIDs);
- ok(tracker.score > initialScore);
- ok(tracker.score >= SCORE_INCREMENT_XLARGE);
+ Assert.greater(tracker.score, initialScore);
+ Assert.greaterOrEqual(tracker.score, SCORE_INCREMENT_XLARGE);
await tracker.stop();
@@ -425,8 +425,8 @@ add_task(async function test_fxa_device_id_change() {
changedIDs = await tracker.getChangedIDs();
equal(Object.keys(changedIDs).length, 1);
ok(engine.localID in changedIDs);
- ok(tracker.score > initialScore);
- ok(tracker.score >= SINGLE_USER_THRESHOLD);
+ Assert.greater(tracker.score, initialScore);
+ Assert.greaterOrEqual(tracker.score, SINGLE_USER_THRESHOLD);
await tracker.stop();
@@ -477,7 +477,10 @@ add_task(async function test_last_modified() {
await engine._uploadOutgoing();
_("Local record should have updated timestamp");
- ok(engine._store._remoteClients[activeID].serverLastModified >= now);
+ Assert.greaterOrEqual(
+ engine._store._remoteClients[activeID].serverLastModified,
+ now
+ );
_("Record on the server should have new name but not serverLastModified");
let payload = collection.cleartext(activeID);
@@ -732,7 +735,7 @@ add_task(async function test_filter_duplicate_names() {
strictEqual(engine.lastRecordUpload, 0);
ok(engine.isFirstSync);
await syncClientsEngine(server);
- ok(engine.lastRecordUpload > 0);
+ Assert.greater(engine.lastRecordUpload, 0);
ok(!engine.isFirstSync);
deepEqual(
user.collection("clients").keys().sort(),
@@ -776,7 +779,7 @@ add_task(async function test_filter_duplicate_names() {
// Check that a subsequent Sync doesn't report anything as being processed.
let counts;
- Svc.Obs.add("weave:engine:sync:applied", function observe(subject, data) {
+ Svc.Obs.add("weave:engine:sync:applied", function observe(subject) {
Svc.Obs.remove("weave:engine:sync:applied", observe);
counts = subject;
});
@@ -915,7 +918,7 @@ add_task(async function test_command_sync() {
_("Checking record was uploaded.");
notEqual(clientWBO(engine.localID).payload, undefined);
- ok(engine.lastRecordUpload > 0);
+ Assert.greater(engine.lastRecordUpload, 0);
ok(!engine.isFirstSync);
notEqual(clientWBO(remoteId).payload, undefined);
diff --git a/services/sync/tests/unit/test_declined.js b/services/sync/tests/unit/test_declined.js
index af7f8eb8c5..aecd33ee6e 100644
--- a/services/sync/tests/unit/test_declined.js
+++ b/services/sync/tests/unit/test_declined.js
@@ -79,7 +79,7 @@ add_task(async function testOldMeta() {
let declinedEngines = new DeclinedEngines(Service);
- function onNotDeclined(subject, topic, data) {
+ function onNotDeclined(subject) {
Observers.remove("weave:engines:notdeclined", onNotDeclined);
Assert.ok(
subject.undecided.has("actual"),
@@ -129,7 +129,7 @@ add_task(async function testDeclinedMeta() {
let declinedEngines = new DeclinedEngines(Service);
- function onNotDeclined(subject, topic, data) {
+ function onNotDeclined(subject) {
Observers.remove("weave:engines:notdeclined", onNotDeclined);
Assert.ok(
subject.undecided.has("actual"),
diff --git a/services/sync/tests/unit/test_engine_abort.js b/services/sync/tests/unit/test_engine_abort.js
index f9bbf9d338..a7c62afb4a 100644
--- a/services/sync/tests/unit/test_engine_abort.js
+++ b/services/sync/tests/unit/test_engine_abort.js
@@ -37,7 +37,7 @@ add_task(async function test_processIncoming_abort() {
);
meta_global.payload.engines = { rotary: { version: engine.version, syncID } };
_("Fake applyIncoming to abort.");
- engine._store.applyIncoming = async function (record) {
+ engine._store.applyIncoming = async function () {
let ex = {
code: SyncEngine.prototype.eEngineAbortApplyIncoming,
cause: "Nooo",
diff --git a/services/sync/tests/unit/test_errorhandler_1.js b/services/sync/tests/unit/test_errorhandler_1.js
index 2d52b93a02..f5e96ed44e 100644
--- a/services/sync/tests/unit/test_errorhandler_1.js
+++ b/services/sync/tests/unit/test_errorhandler_1.js
@@ -286,13 +286,10 @@ add_task(async function test_info_collections_login_server_maintenance_error() {
await configureIdentity({ username: "broken.info" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -318,13 +315,10 @@ add_task(async function test_meta_global_login_server_maintenance_error() {
await configureIdentity({ username: "broken.meta" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
diff --git a/services/sync/tests/unit/test_errorhandler_2.js b/services/sync/tests/unit/test_errorhandler_2.js
index 5cab4d832d..2a7e6ba619 100644
--- a/services/sync/tests/unit/test_errorhandler_2.js
+++ b/services/sync/tests/unit/test_errorhandler_2.js
@@ -74,13 +74,10 @@ add_task(async function test_crypto_keys_login_server_maintenance_error() {
Service.collectionKeys.clear();
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -178,13 +175,10 @@ add_task(
await configureIdentity({ username: "broken.info" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -215,13 +209,10 @@ add_task(
await configureIdentity({ username: "broken.meta" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -254,13 +245,10 @@ add_task(
Service.collectionKeys.clear();
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -291,13 +279,10 @@ add_task(
await configureIdentity({ username: "broken.keys" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -328,13 +313,10 @@ add_task(
await configureIdentity({ username: "broken.wipe" }, server);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
@@ -368,13 +350,10 @@ add_task(
engine.enabled = true;
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.ok(!Status.enforceBackoff);
Assert.equal(Status.service, STATUS_OK);
diff --git a/services/sync/tests/unit/test_errorhandler_filelog.js b/services/sync/tests/unit/test_errorhandler_filelog.js
index 66260b3f59..357049276c 100644
--- a/services/sync/tests/unit/test_errorhandler_filelog.js
+++ b/services/sync/tests/unit/test_errorhandler_filelog.js
@@ -82,7 +82,7 @@ function readFile(file, callback) {
uri: NetUtil.newURI(file),
loadUsingSystemPrincipal: true,
},
- function (inputStream, statusCode, request) {
+ function (inputStream, statusCode) {
let data = NetUtil.readInputStreamToString(
inputStream,
inputStream.available()
diff --git a/services/sync/tests/unit/test_fxa_node_reassignment.js b/services/sync/tests/unit/test_fxa_node_reassignment.js
index 0b25df0183..0fa7ee922c 100644
--- a/services/sync/tests/unit/test_fxa_node_reassignment.js
+++ b/services/sync/tests/unit/test_fxa_node_reassignment.js
@@ -46,7 +46,7 @@ function prepareServer(cbAfterTokenFetch) {
// A server callback to ensure we don't accidentally hit the wrong endpoint
// after a node reassignment.
let callback = {
- onRequest(req, resp) {
+ onRequest(req) {
let full = `${req.scheme}://${req.host}:${req.port}${req.path}`;
let expected = config.fxaccount.token.endpoint;
Assert.ok(
diff --git a/services/sync/tests/unit/test_history_engine.js b/services/sync/tests/unit/test_history_engine.js
index 9cca379b0b..259338df09 100644
--- a/services/sync/tests/unit/test_history_engine.js
+++ b/services/sync/tests/unit/test_history_engine.js
@@ -16,13 +16,13 @@ XPCOMUtils.defineLazyServiceGetter(
"mozIAsyncHistory"
);
async function rawAddVisit(id, uri, visitPRTime, transitionType) {
- return new Promise((resolve, reject) => {
+ return new Promise(resolve => {
let results = [];
let handler = {
handleResult(result) {
results.push(result);
},
- handleError(resultCode, placeInfo) {
+ handleError(resultCode) {
do_throw(`updatePlaces gave error ${resultCode}!`);
},
handleCompletion(count) {
diff --git a/services/sync/tests/unit/test_history_store.js b/services/sync/tests/unit/test_history_store.js
index 07aee0dd01..1777664bb6 100644
--- a/services/sync/tests/unit/test_history_store.js
+++ b/services/sync/tests/unit/test_history_store.js
@@ -17,7 +17,7 @@ const TIMESTAMP3 = (Date.now() - 123894) * 1000;
function promiseOnVisitObserved() {
return new Promise(res => {
- let listener = new PlacesWeakCallbackWrapper(events => {
+ let listener = new PlacesWeakCallbackWrapper(() => {
PlacesObservers.removeListener(["page-visited"], listener);
res();
});
diff --git a/services/sync/tests/unit/test_history_tracker.js b/services/sync/tests/unit/test_history_tracker.js
index 6f351d6984..64433574b4 100644
--- a/services/sync/tests/unit/test_history_tracker.js
+++ b/services/sync/tests/unit/test_history_tracker.js
@@ -37,7 +37,7 @@ async function verifyTrackedItems(tracked) {
let trackedIDs = new Set(Object.keys(changes));
for (let guid of tracked) {
ok(guid in changes, `${guid} should be tracked`);
- ok(changes[guid] > 0, `${guid} should have a modified time`);
+ Assert.greater(changes[guid], 0, `${guid} should have a modified time`);
trackedIDs.delete(guid);
}
equal(
@@ -160,7 +160,7 @@ add_task(async function test_dont_track_expiration() {
let scorePromise = promiseOneObserver("weave:engine:score:updated");
// Observe expiration.
- Services.obs.addObserver(function onExpiration(aSubject, aTopic, aData) {
+ Services.obs.addObserver(function onExpiration(aSubject, aTopic) {
Services.obs.removeObserver(onExpiration, aTopic);
// Remove the remaining page to update its score.
PlacesUtils.history.remove(uriToRemove);
diff --git a/services/sync/tests/unit/test_hmac_error.js b/services/sync/tests/unit/test_hmac_error.js
index 26dbc12dea..a04e54f476 100644
--- a/services/sync/tests/unit/test_hmac_error.js
+++ b/services/sync/tests/unit/test_hmac_error.js
@@ -171,7 +171,7 @@ add_task(async function hmac_error_during_node_reassignment() {
}
let onSyncFinished = function () {};
let obs = {
- observe: function observe(subject, topic, data) {
+ observe: function observe(subject, topic) {
switch (topic) {
case "weave:service:sync:error":
onSyncError();
diff --git a/services/sync/tests/unit/test_httpd_sync_server.js b/services/sync/tests/unit/test_httpd_sync_server.js
index 6ac8ff5e04..23bb05b15d 100644
--- a/services/sync/tests/unit/test_httpd_sync_server.js
+++ b/services/sync/tests/unit/test_httpd_sync_server.js
@@ -160,7 +160,7 @@ add_task(async function test_storage_request() {
async function deleteWBONotExists() {
let req = localRequest(server, keysURL);
- server.callback.onItemDeleted = function (username, collection, wboID) {
+ server.callback.onItemDeleted = function () {
do_throw("onItemDeleted should not have been called.");
};
diff --git a/services/sync/tests/unit/test_interval_triggers.js b/services/sync/tests/unit/test_interval_triggers.js
index 6f2821ec45..eb0b39f636 100644
--- a/services/sync/tests/unit/test_interval_triggers.js
+++ b/services/sync/tests/unit/test_interval_triggers.js
@@ -51,7 +51,7 @@ add_task(async function setup() {
// Don't remove stale clients when syncing. This is a test-only workaround
// that lets us add clients directly to the store, without losing them on
// the next sync.
- clientsEngine._removeRemoteClient = async id => {};
+ clientsEngine._removeRemoteClient = async () => {};
});
add_task(async function test_successful_sync_adjustSyncInterval() {
diff --git a/services/sync/tests/unit/test_password_engine.js b/services/sync/tests/unit/test_password_engine.js
index 081403f63d..54fe8972f2 100644
--- a/services/sync/tests/unit/test_password_engine.js
+++ b/services/sync/tests/unit/test_password_engine.js
@@ -434,8 +434,9 @@ add_task(async function test_sync_outgoing() {
equal(deletedLogin.guid, guid, "deleted login guid");
equal(deletedLogin.everSynced, true, "deleted login everSynced");
equal(deletedLogin.syncCounter, 0, "deleted login syncCounter");
- ok(
- deletedLogin.timePasswordChanged > 0,
+ Assert.greater(
+ deletedLogin.timePasswordChanged,
+ 0,
"deleted login timePasswordChanged"
);
} finally {
@@ -525,7 +526,7 @@ add_task(async function test_sync_incoming() {
checkFields.forEach(field => {
equal(logins[0][field], details[field]);
});
- ok(logins[0].timePasswordChanged > details.timePasswordChanged);
+ Assert.greater(logins[0].timePasswordChanged, details.timePasswordChanged);
equal(logins[0].syncCounter, 0);
equal(logins[0].everSynced, true);
@@ -553,7 +554,7 @@ add_task(async function test_sync_incoming() {
checkFields.forEach(field => {
equal(logins[0][field], details[field]);
});
- ok(logins[0].timePasswordChanged > details.timePasswordChanged);
+ Assert.greater(logins[0].timePasswordChanged, details.timePasswordChanged);
equal(logins[0].syncCounter, 0);
equal(logins[0].everSynced, true);
diff --git a/services/sync/tests/unit/test_resource.js b/services/sync/tests/unit/test_resource.js
index 5182784639..5dee57b39a 100644
--- a/services/sync/tests/unit/test_resource.js
+++ b/services/sync/tests/unit/test_resource.js
@@ -480,7 +480,7 @@ add_task(async function test_post_override_content_type() {
add_task(async function test_weave_backoff() {
_("X-Weave-Backoff header notifies observer");
let backoffInterval;
- function onBackoff(subject, data) {
+ function onBackoff(subject) {
backoffInterval = subject;
}
Observers.add("weave:service:backoff:interval", onBackoff);
diff --git a/services/sync/tests/unit/test_service_sync_401.js b/services/sync/tests/unit/test_service_sync_401.js
index a0bde0b0ab..0c285872e9 100644
--- a/services/sync/tests/unit/test_service_sync_401.js
+++ b/services/sync/tests/unit/test_service_sync_401.js
@@ -48,7 +48,7 @@ add_task(async function run_test() {
Svc.PrefBranch.setIntPref("lastPing", Math.floor(Date.now() / 1000));
let threw = false;
- Svc.Obs.add("weave:service:sync:error", function (subject, data) {
+ Svc.Obs.add("weave:service:sync:error", function () {
threw = true;
});
diff --git a/services/sync/tests/unit/test_service_verifyLogin.js b/services/sync/tests/unit/test_service_verifyLogin.js
index b99b5c692c..b3afe6179a 100644
--- a/services/sync/tests/unit/test_service_verifyLogin.js
+++ b/services/sync/tests/unit/test_service_verifyLogin.js
@@ -78,13 +78,10 @@ add_task(async function test_verifyLogin() {
Service._updateCachedURLs();
Assert.ok(!Service.status.enforceBackoff);
let backoffInterval;
- Svc.Obs.add(
- "weave:service:backoff:interval",
- function observe(subject, data) {
- Svc.Obs.remove("weave:service:backoff:interval", observe);
- backoffInterval = subject;
- }
- );
+ Svc.Obs.add("weave:service:backoff:interval", function observe(subject) {
+ Svc.Obs.remove("weave:service:backoff:interval", observe);
+ backoffInterval = subject;
+ });
Assert.equal(false, await Service.verifyLogin());
Assert.ok(Service.status.enforceBackoff);
Assert.equal(backoffInterval, 42);
diff --git a/services/sync/tests/unit/test_sync_auth_manager.js b/services/sync/tests/unit/test_sync_auth_manager.js
index 9af40d26c6..f9fa669667 100644
--- a/services/sync/tests/unit/test_sync_auth_manager.js
+++ b/services/sync/tests/unit/test_sync_auth_manager.js
@@ -37,9 +37,8 @@ const { TokenServerClient, TokenServerClientServerError } =
ChromeUtils.importESModule(
"resource://services-common/tokenserverclient.sys.mjs"
);
-const { AccountState } = ChromeUtils.importESModule(
- "resource://gre/modules/FxAccounts.sys.mjs"
-);
+const { AccountState, ERROR_INVALID_ACCOUNT_STATE } =
+ ChromeUtils.importESModule("resource://gre/modules/FxAccounts.sys.mjs");
const SECOND_MS = 1000;
const MINUTE_MS = SECOND_MS * 60;
@@ -192,8 +191,11 @@ add_task(async function test_initialializeWithAuthErrorAndDeletedAccount() {
await Assert.rejects(
syncAuthManager._ensureValidToken(),
- AuthenticationError,
- "should reject due to an auth error"
+ err => {
+ Assert.equal(err.message, ERROR_INVALID_ACCOUNT_STATE);
+ return true; // expected error
+ },
+ "should reject because the account was deleted"
);
Assert.ok(accessTokenWithSessionTokenCalled);
@@ -801,14 +803,11 @@ add_task(async function test_getKeysMissing() {
storageManager.initialize(identityConfig.fxaccount.user);
return new AccountState(storageManager);
},
- // And the keys object with a mock that returns no keys.
- keys: {
- getKeyForScope() {
- return Promise.resolve(null);
- },
- },
});
-
+ fxa.getOAuthTokenAndKey = () => {
+ // And the keys object with a mock that returns no keys.
+ return Promise.resolve({ key: null, token: "fake token" });
+ };
syncAuthManager._fxaService = fxa;
await Assert.rejects(
@@ -844,14 +843,12 @@ add_task(async function test_getKeysUnexpecedError() {
storageManager.initialize(identityConfig.fxaccount.user);
return new AccountState(storageManager);
},
- // And the keys object with a mock that returns no keys.
- keys: {
- async getKeyForScope() {
- throw new Error("well that was unexpected");
- },
- },
});
+ fxa.getOAuthTokenAndKey = () => {
+ return Promise.reject("well that was unexpected");
+ };
+
syncAuthManager._fxaService = fxa;
await Assert.rejects(
@@ -1005,7 +1002,7 @@ function mockTokenServer(func) {
requestLog.addAppender(new Log.DumpAppender());
requestLog.level = Log.Level.Trace;
}
- function MockRESTRequest(url) {}
+ function MockRESTRequest() {}
MockRESTRequest.prototype = {
_log: requestLog,
setHeader() {},
diff --git a/services/sync/tests/unit/test_syncedtabs.js b/services/sync/tests/unit/test_syncedtabs.js
index 79ab3e0686..c915e12602 100644
--- a/services/sync/tests/unit/test_syncedtabs.js
+++ b/services/sync/tests/unit/test_syncedtabs.js
@@ -87,7 +87,7 @@ let MockClientsEngine = {
return tabsEngine.clients[id].fxaDeviceId;
},
- getClientType(id) {
+ getClientType() {
return "desktop";
},
};
diff --git a/services/sync/tests/unit/test_syncscheduler.js b/services/sync/tests/unit/test_syncscheduler.js
index 98b7937da3..8eb1ea3f40 100644
--- a/services/sync/tests/unit/test_syncscheduler.js
+++ b/services/sync/tests/unit/test_syncscheduler.js
@@ -92,7 +92,7 @@ add_task(async function setup() {
// Don't remove stale clients when syncing. This is a test-only workaround
// that lets us add clients directly to the store, without losing them on
// the next sync.
- clientsEngine._removeRemoteClient = async id => {};
+ clientsEngine._removeRemoteClient = async () => {};
await Service.engineManager.clear();
validate_all_future_pings();
diff --git a/services/sync/tests/unit/test_tab_quickwrite.js b/services/sync/tests/unit/test_tab_quickwrite.js
index 2a1c75c8c6..c363992d66 100644
--- a/services/sync/tests/unit/test_tab_quickwrite.js
+++ b/services/sync/tests/unit/test_tab_quickwrite.js
@@ -179,7 +179,7 @@ add_task(async function test_tab_quickWrite_telemetry() {
let telem = get_sync_test_telemetry();
telem.payloads = [];
let oldSubmit = telem.submit;
- let submitPromise = new Promise((resolve, reject) => {
+ let submitPromise = new Promise(resolve => {
telem.submit = function (ping) {
telem.submit = oldSubmit;
resolve(ping);
diff --git a/services/sync/tests/unit/test_telemetry.js b/services/sync/tests/unit/test_telemetry.js
index 961e96a01b..4f3a4e7c2b 100644
--- a/services/sync/tests/unit/test_telemetry.js
+++ b/services/sync/tests/unit/test_telemetry.js
@@ -734,7 +734,7 @@ add_task(async function test_clean_real_os_error() {
equal(failureReason.name, "unexpectederror");
equal(
failureReason.error,
- "OS error [File/Path not found] Could not open the file at [profileDir]/no/such/path.json"
+ "OS error [File/Path not found] Could not open `[profileDir]/no/such/path.json': file does not exist"
);
});
} finally {
@@ -1351,7 +1351,7 @@ add_task(async function test_no_node_type() {
await configureIdentity(null, server);
await sync_and_validate_telem(ping => {
- ok(ping.syncNodeType === undefined);
+ Assert.strictEqual(ping.syncNodeType, undefined);
}, true);
await promiseStopServer(server);
});
diff --git a/services/sync/tests/unit/test_uistate.js b/services/sync/tests/unit/test_uistate.js
index cb1ff1979e..136f274a71 100644
--- a/services/sync/tests/unit/test_uistate.js
+++ b/services/sync/tests/unit/test_uistate.js
@@ -292,7 +292,7 @@ add_task(async function test_syncFinished() {
const newState = Object.assign({}, UIState.get());
ok(!newState.syncing);
- ok(new Date(newState.lastSync) > new Date(oldState.lastSync));
+ Assert.greater(new Date(newState.lastSync), new Date(oldState.lastSync));
});
add_task(async function test_syncError() {
@@ -314,7 +314,7 @@ add_task(async function test_syncError() {
function observeUIUpdate() {
return new Promise(resolve => {
- let obs = (aSubject, aTopic, aData) => {
+ let obs = (aSubject, aTopic) => {
Services.obs.removeObserver(obs, aTopic);
const state = UIState.get();
resolve(state);
diff --git a/services/sync/tps/extensions/tps/resource/modules/bookmarkValidator.sys.mjs b/services/sync/tps/extensions/tps/resource/modules/bookmarkValidator.sys.mjs
index a7724c6aaa..b78e31ab79 100644
--- a/services/sync/tps/extensions/tps/resource/modules/bookmarkValidator.sys.mjs
+++ b/services/sync/tps/extensions/tps/resource/modules/bookmarkValidator.sys.mjs
@@ -602,7 +602,7 @@ class ServerRecordInspection {
await lazy.Async.yieldingForEach(
this.liveRecords,
- (record, i) => {
+ record => {
if (!seen.has(record.id)) {
// We intentionally don't record the parentid here, since we only record
// that if the record refers to a parent that doesn't exist, which we
diff --git a/services/sync/tps/extensions/tps/resource/modules/tabs.sys.mjs b/services/sync/tps/extensions/tps/resource/modules/tabs.sys.mjs
index 8ea8f3b780..5ac25dbb4c 100644
--- a/services/sync/tps/extensions/tps/resource/modules/tabs.sys.mjs
+++ b/services/sync/tps/extensions/tps/resource/modules/tabs.sys.mjs
@@ -44,7 +44,7 @@ export var BrowserTabs = {
// Wait for the tab to load.
await new Promise(resolve => {
let mm = browser.ownerGlobal.messageManager;
- mm.addMessageListener("tps:loadEvent", function onLoad(msg) {
+ mm.addMessageListener("tps:loadEvent", function onLoad() {
mm.removeMessageListener("tps:loadEvent", onLoad);
resolve();
});
diff --git a/services/sync/tps/extensions/tps/resource/modules/windows.sys.mjs b/services/sync/tps/extensions/tps/resource/modules/windows.sys.mjs
index b0798b9031..22c2f47ec9 100644
--- a/services/sync/tps/extensions/tps/resource/modules/windows.sys.mjs
+++ b/services/sync/tps/extensions/tps/resource/modules/windows.sys.mjs
@@ -16,7 +16,7 @@ export var BrowserWindows = {
* @param aPrivate The private option.
* @return nothing
*/
- Add(aPrivate, fn) {
+ Add(aPrivate) {
return new Promise(resolve => {
let mainWindow = Services.wm.getMostRecentWindow("navigator:browser");
let win = mainWindow.OpenBrowserWindow({ private: aPrivate });
diff --git a/services/sync/tps/extensions/tps/resource/tps.sys.mjs b/services/sync/tps/extensions/tps/resource/tps.sys.mjs
index 2c4a5994a6..449ca27411 100644
--- a/services/sync/tps/extensions/tps/resource/tps.sys.mjs
+++ b/services/sync/tps/extensions/tps/resource/tps.sys.mjs
@@ -168,7 +168,7 @@ export var TPS = {
"nsISupportsWeakReference",
]),
- observe: function TPS__observe(subject, topic, data) {
+ observe: function TPS__observe(subject, topic) {
try {
lazy.Logger.logInfo("----------event observed: " + topic);