summaryrefslogtreecommitdiffstats
path: root/toolkit/components/url-classifier/tests/unit
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 09:22:09 +0000
commit43a97878ce14b72f0981164f87f2e35e14151312 (patch)
tree620249daf56c0258faa40cbdcf9cfba06de2a846 /toolkit/components/url-classifier/tests/unit
parentInitial commit. (diff)
downloadfirefox-upstream.tar.xz
firefox-upstream.zip
Adding upstream version 110.0.1.upstream/110.0.1upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'toolkit/components/url-classifier/tests/unit')
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256bin0 -> 948 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/digest1.chunkbin0 -> 939 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/digest2.chunk2
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/invalid.chunk2
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256bin0 -> 3029 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/head_urlclassifier.js572
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_addsub.js329
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_backoff.js92
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js65
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_canonicalization.js83
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js223
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_dbservice.js329
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_digest256.js143
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_exceptionListService.js285
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_features.js83
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_hashcompleter.js438
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js292
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_listmanager.js355
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js7
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_partial.js611
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js104
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_pref.js15
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_prefixset.js178
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_provider_url.js32
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_rsListService.js370
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js29
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_shouldclassify.js164
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_streamupdater.js244
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js55
-rw-r--r--toolkit/components/url-classifier/tests/unit/xpcshell.ini37
30 files changed, 5139 insertions, 0 deletions
diff --git a/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256 b/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256
new file mode 100644
index 0000000000..cf95b25ac3
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest1.chunk b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
new file mode 100644
index 0000000000..3850373c19
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest2.chunk b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
new file mode 100644
index 0000000000..738c96f6ba
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
@@ -0,0 +1,2 @@
+a:5:32:32
+_H^a7]=#nmnoQ \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/data/invalid.chunk b/toolkit/components/url-classifier/tests/unit/data/invalid.chunk
new file mode 100644
index 0000000000..7911ca4963
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/invalid.chunk
@@ -0,0 +1,2 @@
+a:5:32
+“Ê_Há^˜aÍ7ÂÙ]´=#ÌnmåÃøún‹æo—ÌQ‰
diff --git a/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256 b/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256
new file mode 100644
index 0000000000..40f64f3cbf
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
new file mode 100644
index 0000000000..773f726458
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
@@ -0,0 +1,572 @@
+//* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- *
+function dumpn(s) {
+ dump(s + "\n");
+}
+
+const NS_APP_USER_PROFILE_50_DIR = "ProfD";
+const NS_APP_USER_PROFILE_LOCAL_50_DIR = "ProfLD";
+
+var {
+ HTTP_400,
+ HTTP_401,
+ HTTP_402,
+ HTTP_403,
+ HTTP_404,
+ HTTP_405,
+ HTTP_406,
+ HTTP_407,
+ HTTP_408,
+ HTTP_409,
+ HTTP_410,
+ HTTP_411,
+ HTTP_412,
+ HTTP_413,
+ HTTP_414,
+ HTTP_415,
+ HTTP_417,
+ HTTP_500,
+ HTTP_501,
+ HTTP_502,
+ HTTP_503,
+ HTTP_504,
+ HTTP_505,
+ HttpError,
+ HttpServer,
+} = ChromeUtils.import("resource://testing-common/httpd.js");
+
+do_get_profile();
+
+// Ensure PSM is initialized before the test
+Cc["@mozilla.org/psm;1"].getService(Ci.nsISupports);
+
+// Disable hashcompleter noise for tests
+Services.prefs.setIntPref("urlclassifier.gethashnoise", 0);
+
+// Enable malware/phishing checking for tests
+Services.prefs.setBoolPref("browser.safebrowsing.malware.enabled", true);
+Services.prefs.setBoolPref("browser.safebrowsing.blockedURIs.enabled", true);
+Services.prefs.setBoolPref("browser.safebrowsing.phishing.enabled", true);
+Services.prefs.setBoolPref(
+ "browser.safebrowsing.provider.test.disableBackoff",
+ true
+);
+
+// Add testing tables, we don't use moztest-* here because it doesn't support update
+Services.prefs.setCharPref("urlclassifier.phishTable", "test-phish-simple");
+Services.prefs.setCharPref(
+ "urlclassifier.malwareTable",
+ "test-harmful-simple,test-malware-simple,test-unwanted-simple"
+);
+Services.prefs.setCharPref("urlclassifier.blockedTable", "test-block-simple");
+Services.prefs.setCharPref("urlclassifier.trackingTable", "test-track-simple");
+Services.prefs.setCharPref(
+ "urlclassifier.trackingWhitelistTable",
+ "test-trackwhite-simple"
+);
+
+// Enable all completions for tests
+Services.prefs.setCharPref("urlclassifier.disallow_completions", "");
+
+// Hash completion timeout
+Services.prefs.setIntPref("urlclassifier.gethash.timeout_ms", 5000);
+
+function delFile(name) {
+ try {
+ // Delete a previously created sqlite file
+ var file = Services.dirsvc.get("ProfLD", Ci.nsIFile);
+ file.append(name);
+ if (file.exists()) {
+ file.remove(false);
+ }
+ } catch (e) {}
+}
+
+function cleanUp() {
+ delFile("urlclassifier3.sqlite");
+ delFile("safebrowsing/classifier.hashkey");
+ delFile("safebrowsing/test-phish-simple.sbstore");
+ delFile("safebrowsing/test-malware-simple.sbstore");
+ delFile("safebrowsing/test-unwanted-simple.sbstore");
+ delFile("safebrowsing/test-block-simple.sbstore");
+ delFile("safebrowsing/test-harmful-simple.sbstore");
+ delFile("safebrowsing/test-track-simple.sbstore");
+ delFile("safebrowsing/test-trackwhite-simple.sbstore");
+ delFile("safebrowsing/test-phish-simple.pset");
+ delFile("safebrowsing/test-malware-simple.pset");
+ delFile("safebrowsing/test-unwanted-simple.pset");
+ delFile("safebrowsing/test-block-simple.pset");
+ delFile("safebrowsing/test-harmful-simple.pset");
+ delFile("safebrowsing/test-track-simple.pset");
+ delFile("safebrowsing/test-trackwhite-simple.pset");
+ delFile("safebrowsing/moz-phish-simple.sbstore");
+ delFile("safebrowsing/moz-phish-simple.pset");
+ delFile("testLarge.pset");
+ delFile("testNoDelta.pset");
+}
+
+// Update uses allTables by default
+var allTables =
+ "test-phish-simple,test-malware-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple";
+var mozTables = "moz-phish-simple";
+
+var dbservice = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+var streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+/*
+ * Builds an update from an object that looks like:
+ *{ "test-phish-simple" : [{
+ * "chunkType" : "a", // 'a' is assumed if not specified
+ * "chunkNum" : 1, // numerically-increasing chunk numbers are assumed
+ * // if not specified
+ * "urls" : [ "foo.com/a", "foo.com/b", "bar.com/" ]
+ * }
+ */
+
+function buildUpdate(update, hashSize) {
+ if (!hashSize) {
+ hashSize = 32;
+ }
+ var updateStr = "n:1000\n";
+
+ for (var tableName in update) {
+ if (tableName != "") {
+ updateStr += "i:" + tableName + "\n";
+ }
+ var chunks = update[tableName];
+ for (var j = 0; j < chunks.length; j++) {
+ var chunk = chunks[j];
+ var chunkType = chunk.chunkType ? chunk.chunkType : "a";
+ var chunkNum = chunk.chunkNum ? chunk.chunkNum : j;
+ updateStr += chunkType + ":" + chunkNum + ":" + hashSize;
+
+ if (chunk.urls) {
+ var chunkData = chunk.urls.join("\n");
+ updateStr += ":" + chunkData.length + "\n" + chunkData;
+ }
+
+ updateStr += "\n";
+ }
+ }
+
+ return updateStr;
+}
+
+function buildPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-phish-simple": chunks }, hashSize);
+}
+
+function buildMalwareUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-malware-simple": chunks }, hashSize);
+}
+
+function buildUnwantedUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-unwanted-simple": chunks }, hashSize);
+}
+
+function buildBlockedUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-block-simple": chunks }, hashSize);
+}
+
+function buildMozPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({ "moz-phish-simple": chunks }, hashSize);
+}
+
+function buildBareUpdate(chunks, hashSize) {
+ return buildUpdate({ "": chunks }, hashSize);
+}
+
+/**
+ * Performs an update of the dbservice manually, bypassing the stream updater
+ */
+function doSimpleUpdate(updateText, success, failure) {
+ var listener = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierUpdateObserver"]),
+
+ updateUrlRequested(url) {},
+ streamFinished(status) {},
+ updateError(errorCode) {
+ failure(errorCode);
+ },
+ updateSuccess(requestedTimeout) {
+ success(requestedTimeout);
+ },
+ };
+
+ dbservice.beginUpdate(listener, allTables);
+ dbservice.beginStream("", "");
+ dbservice.updateStream(updateText);
+ dbservice.finishStream();
+ dbservice.finishUpdate();
+}
+
+/**
+ * Simulates a failed database update.
+ */
+function doErrorUpdate(tables, success, failure) {
+ var listener = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierUpdateObserver"]),
+
+ updateUrlRequested(url) {},
+ streamFinished(status) {},
+ updateError(errorCode) {
+ success(errorCode);
+ },
+ updateSuccess(requestedTimeout) {
+ failure(requestedTimeout);
+ },
+ };
+
+ dbservice.beginUpdate(listener, tables, null);
+ dbservice.beginStream("", "");
+ dbservice.cancelUpdate();
+}
+
+/**
+ * Performs an update of the dbservice using the stream updater and a
+ * data: uri
+ */
+function doStreamUpdate(updateText, success, failure, downloadFailure) {
+ var dataUpdate = "data:," + encodeURIComponent(updateText);
+
+ if (!downloadFailure) {
+ downloadFailure = failure;
+ }
+
+ streamUpdater.downloadUpdates(
+ allTables,
+ "",
+ true,
+ dataUpdate,
+ success,
+ failure,
+ downloadFailure
+ );
+}
+
+var gAssertions = {
+ tableData(expectedTables, cb) {
+ dbservice.getTables(function(tables) {
+ // rebuild the tables in a predictable order.
+ var parts = tables.split("\n");
+ while (parts[parts.length - 1] == "") {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join("\n");
+
+ Assert.equal(tables, expectedTables);
+ cb();
+ });
+ },
+
+ checkUrls(urls, expected, cb, useMoz = false) {
+ // work with a copy of the list.
+ urls = urls.slice(0);
+ var doLookup = function() {
+ if (urls.length) {
+ var tables = useMoz ? mozTables : allTables;
+ var fragment = urls.shift();
+ var principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + fragment),
+ {}
+ );
+ dbservice.lookup(
+ principal,
+ tables,
+ function(arg) {
+ Assert.equal(expected, arg);
+ doLookup();
+ },
+ true
+ );
+ } else {
+ cb();
+ }
+ };
+ doLookup();
+ },
+
+ checkTables(url, expected, cb) {
+ var principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + url),
+ {}
+ );
+ dbservice.lookup(
+ principal,
+ allTables,
+ function(tables) {
+ // Rebuild tables in a predictable order.
+ var parts = tables.split(",");
+ while (parts[parts.length - 1] == "") {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join(",");
+ Assert.equal(tables, expected);
+ cb();
+ },
+ true
+ );
+ },
+
+ urlsDontExist(urls, cb) {
+ this.checkUrls(urls, "", cb);
+ },
+
+ urlsExist(urls, cb) {
+ this.checkUrls(urls, "test-phish-simple", cb);
+ },
+
+ malwareUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-malware-simple", cb);
+ },
+
+ unwantedUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-unwanted-simple", cb);
+ },
+
+ blockedUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-block-simple", cb);
+ },
+
+ mozPhishingUrlsExist(urls, cb) {
+ this.checkUrls(urls, "moz-phish-simple", cb, true);
+ },
+
+ subsDontExist(urls, cb) {
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+ },
+
+ subsExist(urls, cb) {
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+ },
+
+ urlExistInMultipleTables(data, cb) {
+ this.checkTables(data.url, data.tables, cb);
+ },
+};
+
+/**
+ * Check a set of assertions against the gAssertions table.
+ */
+function checkAssertions(assertions, doneCallback) {
+ var checkAssertion = function() {
+ for (var i in assertions) {
+ var data = assertions[i];
+ delete assertions[i];
+ gAssertions[i](data, checkAssertion);
+ return;
+ }
+
+ doneCallback();
+ };
+
+ checkAssertion();
+}
+
+function updateError(arg) {
+ do_throw(arg);
+}
+
+/**
+ * Utility functions
+ */
+ChromeUtils.defineModuleGetter(
+ this,
+ "NetUtil",
+ "resource://gre/modules/NetUtil.jsm"
+);
+
+function readFileToString(aFilename) {
+ let f = do_get_file(aFilename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ stream.init(f, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+// Runs a set of updates, and then checks a set of assertions.
+function doUpdateTest(updates, assertions, successCallback, errorCallback) {
+ var errorUpdate = function() {
+ checkAssertions(assertions, errorCallback);
+ };
+
+ var runUpdate = function() {
+ if (updates.length) {
+ var update = updates.shift();
+ doStreamUpdate(update, runUpdate, errorUpdate, null);
+ } else {
+ checkAssertions(assertions, successCallback);
+ }
+ };
+
+ runUpdate();
+}
+
+var gTests;
+var gNextTest = 0;
+
+function runNextTest() {
+ if (gNextTest >= gTests.length) {
+ do_test_finished();
+ return;
+ }
+
+ dbservice.resetDatabase();
+ dbservice.setHashCompleter("test-phish-simple", null);
+
+ let test = gTests[gNextTest++];
+ dump("running " + test.name + "\n");
+ test();
+}
+
+function runTests(tests) {
+ gTests = tests;
+ runNextTest();
+}
+
+var timerArray = [];
+
+function Timer(delay, cb) {
+ this.cb = cb;
+ var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ timer.initWithCallback(this, delay, timer.TYPE_ONE_SHOT);
+ timerArray.push(timer);
+}
+
+Timer.prototype = {
+ QueryInterface: ChromeUtils.generateQI(["nsITimerCallback"]),
+ notify(timer) {
+ this.cb();
+ },
+};
+
+// LFSRgenerator is a 32-bit linear feedback shift register random number
+// generator. It is highly predictable and is not intended to be used for
+// cryptography but rather to allow easier debugging than a test that uses
+// Math.random().
+function LFSRgenerator(seed) {
+ // Force |seed| to be a number.
+ seed = +seed;
+ // LFSR generators do not work with a value of 0.
+ if (seed == 0) {
+ seed = 1;
+ }
+
+ this._value = seed;
+}
+LFSRgenerator.prototype = {
+ // nextNum returns a random unsigned integer of in the range [0,2^|bits|].
+ nextNum(bits) {
+ if (!bits) {
+ bits = 32;
+ }
+
+ let val = this._value;
+ // Taps are 32, 22, 2 and 1.
+ let bit = ((val >>> 0) ^ (val >>> 10) ^ (val >>> 30) ^ (val >>> 31)) & 1;
+ val = (val >>> 1) | (bit << 31);
+ this._value = val;
+
+ return val >>> (32 - bits);
+ },
+};
+
+function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
+ let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+ );
+
+ dbService.getTables(metaData => {
+ info("metadata: " + metaData);
+ let didCallback = false;
+ metaData.split("\n").some(line => {
+ // Parse [tableName];[stateBase64]
+ let p = line.indexOf(";");
+ if (-1 === p) {
+ return false; // continue.
+ }
+ let tableName = line.substring(0, p);
+ let metadata = line.substring(p + 1).split(":");
+ let stateBase64 = metadata[0];
+ let checksumBase64 = metadata[1];
+
+ if (tableName !== "test-phish-proto") {
+ return false; // continue.
+ }
+
+ if (
+ stateBase64 === btoa(expectedState) &&
+ checksumBase64 === btoa(expectedChecksum)
+ ) {
+ info("State has been saved to disk!");
+
+ // We slightly defer the callback to see if the in-memory
+ // |getTables| caching works correctly.
+ dbService.getTables(cachedMetadata => {
+ equal(cachedMetadata, metaData);
+ callback();
+ });
+
+ // Even though we haven't done callback at this moment
+ // but we still claim "we have" in order to stop repeating
+ // a new timer.
+ didCallback = true;
+ }
+
+ return true; // break no matter whether the state is matching.
+ });
+
+ if (!didCallback) {
+ do_timeout(
+ 1000,
+ waitUntilMetaDataSaved.bind(
+ null,
+ expectedState,
+ expectedChecksum,
+ callback
+ )
+ );
+ }
+ });
+}
+
+var gUpdateFinishedObserverEnabled = false;
+var gUpdateFinishedObserver = function(aSubject, aTopic, aData) {
+ info("[" + aTopic + "] " + aData);
+ if (aData != "success") {
+ updateError(aData);
+ }
+};
+
+function throwOnUpdateErrors() {
+ Services.obs.addObserver(
+ gUpdateFinishedObserver,
+ "safebrowsing-update-finished"
+ );
+ gUpdateFinishedObserverEnabled = true;
+}
+
+function stopThrowingOnUpdateErrors() {
+ if (gUpdateFinishedObserverEnabled) {
+ Services.obs.removeObserver(
+ gUpdateFinishedObserver,
+ "safebrowsing-update-finished"
+ );
+ gUpdateFinishedObserverEnabled = false;
+ }
+}
+
+cleanUp();
+
+registerCleanupFunction(function() {
+ cleanUp();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_addsub.js b/toolkit/components/url-classifier/tests/unit/test_addsub.js
new file mode 100644
index 0000000000..f58a02506f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_addsub.js
@@ -0,0 +1,329 @@
+function doTest(updates, assertions) {
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+}
+
+// Test an add of two urls to a fresh database
+function testSimpleAdds() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: addUrls,
+ };
+
+ doTest([update], assertions);
+}
+
+// Same as testSimpleAdds, but make the same-domain URLs come from different
+// chunks.
+function testMultipleAdds() {
+ var add1Urls = ["foo.com/a", "bar.com/c"];
+ var add2Urls = ["foo.com/b"];
+
+ var update = buildPhishingUpdate([
+ { chunkNum: 1, urls: add1Urls },
+ { chunkNum: 2, urls: add2Urls },
+ ]);
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2",
+ urlsExist: add1Urls.concat(add2Urls),
+ };
+
+ doTest([update], assertions);
+}
+
+// Test that a sub will remove an existing add
+function testSimpleSub() {
+ var addUrls = ["foo.com/a", "bar.com/b"];
+ var subUrls = ["1:foo.com/a"];
+
+ var addUpdate = buildPhishingUpdate([
+ {
+ chunkNum: 1, // adds and subtracts don't share a chunk numbering space
+ urls: addUrls,
+ },
+ ]);
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 50, chunkType: "s", urls: subUrls },
+ ]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1:s:50",
+ urlsExist: ["bar.com/b"],
+ urlsDontExist: ["foo.com/a"],
+ subsDontExist: ["foo.com/a"],
+ };
+
+ doTest([addUpdate, subUpdate], assertions);
+}
+
+// Same as testSimpleSub(), but the sub comes in before the add.
+function testSubEmptiesAdd() {
+ var subUrls = ["1:foo.com/a"];
+ var addUrls = ["foo.com/a", "bar.com/b"];
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 50, chunkType: "s", urls: subUrls },
+ ]);
+
+ var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1:s:50",
+ urlsExist: ["bar.com/b"],
+ urlsDontExist: ["foo.com/a"],
+ subsDontExist: ["foo.com/a"], // this sub was found, it shouldn't exist anymore
+ };
+
+ doTest([subUpdate, addUpdate], assertions);
+}
+
+// Very similar to testSubEmptiesAdd, except that the domain entry will
+// still have an item left over that needs to be synced.
+function testSubPartiallyEmptiesAdd() {
+ var subUrls = ["1:foo.com/a"];
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/b"];
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls },
+ ]);
+
+ var addUpdate = buildPhishingUpdate([
+ {
+ chunkNum: 1, // adds and subtracts don't share a chunk numbering space
+ urls: addUrls,
+ },
+ ]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1:s:1",
+ urlsExist: ["foo.com/b", "bar.com/b"],
+ urlsDontExist: ["foo.com/a"],
+ subsDontExist: ["foo.com/a"], // this sub was found, it shouldn't exist anymore
+ };
+
+ doTest([subUpdate, addUpdate], assertions);
+}
+
+// We SHOULD be testing that pending subs are removed using
+// subsDontExist assertions. Since we don't have a good interface for getting
+// at sub entries, we'll verify it by side-effect. Subbing a url once
+// then adding it twice should leave the url intact.
+function testPendingSubRemoved() {
+ var subUrls = ["1:foo.com/a", "2:foo.com/b"];
+ var addUrls = ["foo.com/a", "foo.com/b"];
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls },
+ ]);
+
+ var addUpdate1 = buildPhishingUpdate([
+ {
+ chunkNum: 1, // adds and subtracts don't share a chunk numbering space
+ urls: addUrls,
+ },
+ ]);
+
+ var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2:s:1",
+ urlsExist: ["foo.com/a", "foo.com/b"],
+ subsDontExist: ["foo.com/a", "foo.com/b"], // this sub was found, it shouldn't exist anymore
+ };
+
+ doTest([subUpdate, addUpdate1, addUpdate2], assertions);
+}
+
+// Make sure that a saved sub is removed when the sub chunk is expired.
+function testPendingSubExpire() {
+ var subUrls = ["1:foo.com/a", "1:foo.com/b"];
+ var addUrls = ["foo.com/a", "foo.com/b"];
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls },
+ ]);
+
+ var expireUpdate = buildPhishingUpdate([{ chunkNum: 1, chunkType: "sd" }]);
+
+ var addUpdate = buildPhishingUpdate([
+ {
+ chunkNum: 1, // adds and subtracts don't share a chunk numbering space
+ urls: addUrls,
+ },
+ ]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: ["foo.com/a", "foo.com/b"],
+ subsDontExist: ["foo.com/a", "foo.com/b"], // this sub was expired
+ };
+
+ doTest([subUpdate, expireUpdate, addUpdate], assertions);
+}
+
+// Make sure that the sub url removes from only the chunk that it specifies
+function testDuplicateAdds() {
+ var urls = ["foo.com/a"];
+
+ var addUpdate1 = buildPhishingUpdate([{ chunkNum: 1, urls }]);
+ var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls }]);
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 3, chunkType: "s", urls: ["2:foo.com/a"] },
+ ]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2:s:3",
+ urlsExist: ["foo.com/a"],
+ subsDontExist: ["foo.com/a"],
+ };
+
+ doTest([addUpdate1, addUpdate2, subUpdate], assertions);
+}
+
+// Tests a sub which matches some existing adds but leaves others.
+function testSubPartiallyMatches() {
+ var addUrls = ["1:foo.com/a", "2:foo.com/b"];
+
+ var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: addUrls },
+ ]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1:s:1",
+ urlsDontExist: ["foo.com/a"],
+ subsDontExist: ["foo.com/a"],
+ subsExist: ["foo.com/b"],
+ };
+
+ doTest([addUpdate, subUpdate], assertions);
+}
+
+// XXX: because subsExist isn't actually implemented, this is the same
+// test as above but with a second add chunk that should fail to be added
+// because of a pending sub chunk.
+function testSubPartiallyMatches2() {
+ var addUrls = ["foo.com/a"];
+ var subUrls = ["1:foo.com/a", "2:foo.com/b"];
+ var addUrls2 = ["foo.com/b"];
+
+ var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
+
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls },
+ ]);
+
+ var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2:s:1",
+ urlsDontExist: ["foo.com/a", "foo.com/b"],
+ subsDontExist: ["foo.com/a", "foo.com/b"],
+ };
+
+ doTest([addUpdate, subUpdate, addUpdate2], assertions);
+}
+
+// Verify that two subs for the same domain but from different chunks
+// match (tests that existing sub entries are properly updated)
+function testSubsDifferentChunks() {
+ var subUrls1 = ["3:foo.com/a"];
+ var subUrls2 = ["3:foo.com/b"];
+
+ var addUrls = ["foo.com/a", "foo.com/b", "foo.com/c"];
+
+ var subUpdate1 = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls1 },
+ ]);
+ var subUpdate2 = buildPhishingUpdate([
+ { chunkNum: 2, chunkType: "s", urls: subUrls2 },
+ ]);
+ var addUpdate = buildPhishingUpdate([{ chunkNum: 3, urls: addUrls }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:3:s:1-2",
+ urlsExist: ["foo.com/c"],
+ urlsDontExist: ["foo.com/a", "foo.com/b"],
+ subsDontExist: ["foo.com/a", "foo.com/b"],
+ };
+
+ doTest([subUpdate1, subUpdate2, addUpdate], assertions);
+}
+
+// for bug 534079
+function testSubsDifferentChunksSameHostId() {
+ var subUrls1 = ["1:foo.com/a"];
+ var subUrls2 = ["1:foo.com/b", "2:foo.com/c"];
+
+ var addUrls = ["foo.com/a", "foo.com/b"];
+ var addUrls2 = ["foo.com/c"];
+
+ var subUpdate1 = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: subUrls1 },
+ ]);
+ var subUpdate2 = buildPhishingUpdate([
+ { chunkNum: 2, chunkType: "s", urls: subUrls2 },
+ ]);
+
+ var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
+ var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2:s:1-2",
+ urlsDontExist: ["foo.com/c", "foo.com/b", "foo.com/a"],
+ };
+
+ doTest([addUpdate, addUpdate2, subUpdate1, subUpdate2], assertions);
+}
+
+// Test lists of expired chunks
+function testExpireLists() {
+ var addUpdate = buildPhishingUpdate([
+ { chunkNum: 1, urls: ["foo.com/a"] },
+ { chunkNum: 3, urls: ["bar.com/a"] },
+ { chunkNum: 4, urls: ["baz.com/a"] },
+ { chunkNum: 5, urls: ["blah.com/a"] },
+ ]);
+ var subUpdate = buildPhishingUpdate([
+ { chunkNum: 1, chunkType: "s", urls: ["50:foo.com/1"] },
+ { chunkNum: 2, chunkType: "s", urls: ["50:bar.com/1"] },
+ { chunkNum: 3, chunkType: "s", urls: ["50:baz.com/1"] },
+ { chunkNum: 5, chunkType: "s", urls: ["50:blah.com/1"] },
+ ]);
+
+ var expireUpdate = buildPhishingUpdate([
+ { chunkType: "ad:1,3-5" },
+ { chunkType: "sd:1-3,5" },
+ ]);
+
+ var assertions = {
+ // "tableData" : "test-phish-simple;"
+ tableData: "",
+ };
+
+ doTest([addUpdate, subUpdate, expireUpdate], assertions);
+}
+
+function run_test() {
+ runTests([
+ testSimpleAdds,
+ testMultipleAdds,
+ testSimpleSub,
+ testSubEmptiesAdd,
+ testSubPartiallyEmptiesAdd,
+ testPendingSubRemoved,
+ testPendingSubExpire,
+ testDuplicateAdds,
+ testSubPartiallyMatches,
+ testSubPartiallyMatches2,
+ testSubsDifferentChunks,
+ testSubsDifferentChunksSameHostId,
+ testExpireLists,
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_backoff.js b/toolkit/components/url-classifier/tests/unit/test_backoff.js
new file mode 100644
index 0000000000..ffdaa29524
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_backoff.js
@@ -0,0 +1,92 @@
+// Some unittests (e.g., paste into JS shell)
+var jslib = Cc["@mozilla.org/url-classifier/jslib;1"].getService()
+ .wrappedJSObject;
+
+var jslibDate = Cu.getGlobalForObject(jslib).Date;
+
+var _Datenow = jslibDate.now;
+function setNow(time) {
+ jslibDate.now = function() {
+ return time;
+ };
+}
+
+function run_test() {
+ // 3 errors, 1ms retry period, max 3 requests per ten milliseconds,
+ // 5ms backoff interval, 19ms max delay
+ var rb = new jslib.RequestBackoff(3, 1, 3, 10, 5, 19, 0);
+ setNow(1);
+ rb.noteServerResponse(200);
+ Assert.ok(rb.canMakeRequest());
+ setNow(2);
+ Assert.ok(rb.canMakeRequest());
+
+ // First error should trigger a 1ms delay
+ rb.noteServerResponse(500);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 3);
+ setNow(3);
+ Assert.ok(rb.canMakeRequest());
+
+ // Second error should also trigger a 1ms delay
+ rb.noteServerResponse(500);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 4);
+ setNow(4);
+ Assert.ok(rb.canMakeRequest());
+
+ // Third error should trigger a 5ms backoff
+ rb.noteServerResponse(500);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 9);
+ setNow(9);
+ Assert.ok(rb.canMakeRequest());
+
+ // Trigger backoff again
+ rb.noteServerResponse(503);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 19);
+ setNow(19);
+ Assert.ok(rb.canMakeRequest());
+
+ // Trigger backoff a third time and hit max timeout
+ rb.noteServerResponse(302);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 38);
+ setNow(38);
+ Assert.ok(rb.canMakeRequest());
+
+ // One more backoff, should still be at the max timeout
+ rb.noteServerResponse(400);
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 57);
+ setNow(57);
+ Assert.ok(rb.canMakeRequest());
+
+ // Request goes through
+ rb.noteServerResponse(200);
+ Assert.ok(rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 0);
+ setNow(58);
+ rb.noteServerResponse(500);
+
+ // Another error, should trigger a 1ms backoff
+ Assert.ok(!rb.canMakeRequest());
+ Assert.equal(rb.nextRequestTime_, 59);
+
+ setNow(59);
+ Assert.ok(rb.canMakeRequest());
+
+ setNow(200);
+ rb.noteRequest();
+ setNow(201);
+ rb.noteRequest();
+ setNow(202);
+ Assert.ok(rb.canMakeRequest());
+ rb.noteRequest();
+ Assert.ok(!rb.canMakeRequest());
+ setNow(211);
+ Assert.ok(rb.canMakeRequest());
+
+ jslibDate.now = _Datenow;
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
new file mode 100644
index 0000000000..bab11b055d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
@@ -0,0 +1,65 @@
+const { SafeBrowsing } = ChromeUtils.import(
+ "resource://gre/modules/SafeBrowsing.jsm"
+);
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+
+add_setup(async () => {
+ // 'Cc["@mozilla.org/xre/app-info;1"]' for xpcshell has no nsIXULAppInfo
+ // so that we have to update it to make nsURLFormatter.js happy.
+ // (SafeBrowsing.init() will indirectly use nsURLFormatter.js)
+ updateAppInfo();
+
+ // This test should not actually try to create a connection to any real
+ // endpoint. But a background request could try that while the test is in
+ // progress before we've actually shut down networking, and would cause a
+ // crash due to connecting to a non-local IP.
+ Services.prefs.setCharPref(
+ "browser.safebrowsing.provider.mozilla.updateURL",
+ `http://localhost:4444/safebrowsing/update`
+ );
+ registerCleanupFunction(() => {
+ Services.prefs.clearUserPref(
+ "browser.safebrowsing.provider.mozilla.updateURL"
+ );
+ Services.prefs.clearUserPref("browser.safebrowsing.provider.google.lists");
+ Services.prefs.clearUserPref("browser.safebrowsing.provider.google4.lists");
+ });
+});
+
+add_task(async function test() {
+ SafeBrowsing.init();
+
+ let origListV2 = Services.prefs.getCharPref(
+ "browser.safebrowsing.provider.google.lists"
+ );
+ let origListV4 = Services.prefs.getCharPref(
+ "browser.safebrowsing.provider.google4.lists"
+ );
+
+ // Ensure there's a list missing in both Safe Browsing V2 and V4.
+ let trimmedListV2 = origListV2.replace("goog-malware-shavar,", "");
+ Services.prefs.setCharPref(
+ "browser.safebrowsing.provider.google.lists",
+ trimmedListV2
+ );
+ let trimmedListV4 = origListV4.replace("goog-malware-proto,", "");
+ Services.prefs.setCharPref(
+ "browser.safebrowsing.provider.google4.lists",
+ trimmedListV4
+ );
+
+ try {
+ // Bug 1274685 - Unowned Safe Browsing tables break list updates
+ //
+ // If SafeBrowsing.registerTableWithURLs() doesn't check if
+ // a provider is found before registering table, an exception
+ // will be thrown while accessing a null object.
+ //
+ SafeBrowsing.registerTables();
+ ok(true, "SafeBrowsing.registerTables() did not throw.");
+ } catch (e) {
+ ok(false, "Exception thrown due to " + e.toString());
+ }
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_canonicalization.js b/toolkit/components/url-classifier/tests/unit/test_canonicalization.js
new file mode 100644
index 0000000000..e26bb5d84a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_canonicalization.js
@@ -0,0 +1,83 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
+function canonicalize(url) {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+ );
+
+ let uri = Services.io.newURI(url);
+ return uri.scheme + "://" + urlUtils.getKeyForURI(uri);
+}
+
+function run_test() {
+ // These testcases are from
+ // https://developers.google.com/safe-browsing/v4/urls-hashing
+ equal(canonicalize("http://host/%25%32%35"), "http://host/%25");
+ equal(canonicalize("http://host/%25%32%35%25%32%35"), "http://host/%25%25");
+ equal(canonicalize("http://host/%2525252525252525"), "http://host/%25");
+ equal(canonicalize("http://host/asdf%25%32%35asd"), "http://host/asdf%25asd");
+ equal(
+ canonicalize("http://host/%%%25%32%35asd%%"),
+ "http://host/%25%25%25asd%25%25"
+ );
+ equal(canonicalize("http://www.google.com/"), "http://www.google.com/");
+ equal(
+ canonicalize(
+ "http://%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/%77%77%77%2E%65%62%61%79%2E%63%6F%6D/"
+ ),
+ "http://168.188.99.26/.secure/www.ebay.com/"
+ );
+ equal(
+ canonicalize(
+ "http://195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/"
+ ),
+ "http://195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/"
+ );
+ equal(canonicalize("http://3279880203/blah"), "http://195.127.0.11/blah");
+ equal(
+ canonicalize("http://www.google.com/blah/.."),
+ "http://www.google.com/"
+ );
+ equal(
+ canonicalize("http://www.evil.com/blah#frag"),
+ "http://www.evil.com/blah"
+ );
+ equal(canonicalize("http://www.GOOgle.com/"), "http://www.google.com/");
+ equal(canonicalize("http://www.google.com.../"), "http://www.google.com/");
+ equal(
+ canonicalize("http://www.google.com/foo\tbar\rbaz\n2"),
+ "http://www.google.com/foobarbaz2"
+ );
+ equal(canonicalize("http://www.google.com/q?"), "http://www.google.com/q?");
+ equal(
+ canonicalize("http://www.google.com/q?r?"),
+ "http://www.google.com/q?r?"
+ );
+ equal(
+ canonicalize("http://www.google.com/q?r?s"),
+ "http://www.google.com/q?r?s"
+ );
+ equal(canonicalize("http://evil.com/foo#bar#baz"), "http://evil.com/foo");
+ equal(canonicalize("http://evil.com/foo;"), "http://evil.com/foo;");
+ equal(canonicalize("http://evil.com/foo?bar;"), "http://evil.com/foo?bar;");
+ equal(
+ canonicalize("http://notrailingslash.com"),
+ "http://notrailingslash.com/"
+ );
+ equal(
+ canonicalize("http://www.gotaport.com:1234/"),
+ "http://www.gotaport.com/"
+ );
+ equal(
+ canonicalize("https://www.securesite.com/"),
+ "https://www.securesite.com/"
+ );
+ equal(canonicalize("http://host.com/ab%23cd"), "http://host.com/ab%23cd");
+ equal(
+ canonicalize("http://host.com//twoslashes?more//slashes"),
+ "http://host.com/twoslashes?more//slashes"
+ );
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js b/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js
new file mode 100644
index 0000000000..fdcca2acef
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js
@@ -0,0 +1,223 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIChannelClassifierService implementation. */
+
+var httpserver = new HttpServer();
+
+const { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
+const { UrlClassifierTestUtils } = ChromeUtils.import(
+ "resource://testing-common/UrlClassifierTestUtils.jsm"
+);
+
+const FEATURE_STP_PREF = "privacy.trackingprotection.socialtracking.enabled";
+const TOP_LEVEL_DOMAIN = "http://www.example.com/";
+const TRACKER_DOMAIN = "http://social-tracking.example.org/";
+
+function setupChannel(uri, topUri = TOP_LEVEL_DOMAIN) {
+ httpserver.registerPathHandler("/", null);
+ httpserver.start(-1);
+
+ let channel = NetUtil.newChannel({
+ uri: uri + ":" + httpserver.identity.primaryPort,
+ loadingPrincipal: Services.scriptSecurityManager.createContentPrincipal(
+ NetUtil.newURI(topUri),
+ {}
+ ),
+ securityFlags: Ci.nsILoadInfo.SEC_ALLOW_CROSS_ORIGIN_SEC_CONTEXT_IS_NULL,
+ contentPolicyType: Ci.nsIContentPolicy.TYPE_OTHER,
+ });
+
+ channel
+ .QueryInterface(Ci.nsIHttpChannelInternal)
+ .setTopWindowURIIfUnknown(Services.io.newURI(topUri));
+
+ return channel;
+}
+
+function waitForBeforeBlockEvent(expected, callback) {
+ return new Promise(function(resolve) {
+ let observer = function observe(aSubject, aTopic, aData) {
+ switch (aTopic) {
+ case "urlclassifier-before-block-channel":
+ let channel = aSubject.QueryInterface(
+ Ci.nsIUrlClassifierBlockedChannel
+ );
+ Assert.equal(
+ channel.reason,
+ expected.reason,
+ "verify blocked reason"
+ );
+ Assert.equal(
+ channel.url,
+ expected.url,
+ "verify url of blocked channel"
+ );
+
+ if (callback) {
+ callback(channel);
+ }
+
+ service.removeListener(observer);
+ resolve(channel);
+ break;
+ }
+ };
+
+ let service = Cc[
+ "@mozilla.org/url-classifier/channel-classifier-service;1"
+ ].getService(Ci.nsIChannelClassifierService);
+ service.addListener(observer);
+ });
+}
+
+add_task(async function test_block_channel() {
+ Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ let channel = setupChannel(TRACKER_DOMAIN);
+
+ let blockPromise = waitForBeforeBlockEvent(
+ {
+ reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
+ url: channel.URI.spec,
+ },
+ null
+ );
+
+ let openPromise = new Promise((resolve, reject) => {
+ channel.asyncOpen({
+ onStartRequest: (request, context) => {},
+ onDataAvailable: (request, context, stream, offset, count) => {},
+ onStopRequest: (request, status) => {
+ dump("status = " + status + "\n");
+ if (status == 200) {
+ Assert.ok(false, "Should not successfully open the channel");
+ } else {
+ Assert.equal(
+ status,
+ Cr.NS_ERROR_SOCIALTRACKING_URI,
+ "Should fail to open the channel"
+ );
+ }
+ resolve();
+ },
+ });
+ });
+
+ // wait for block event from url-classifier
+ await blockPromise;
+
+ // wait for onStopRequest callback from AsyncOpen
+ await openPromise;
+
+ // clean up
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ Services.prefs.clearUserPref(FEATURE_STP_PREF);
+ httpserver.stop();
+});
+
+add_task(async function test_unblock_channel() {
+ Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
+ //Services.prefs.setBoolPref("network.dns.native-is-localhost", true);
+
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ let channel = setupChannel(TRACKER_DOMAIN);
+
+ let blockPromise = waitForBeforeBlockEvent(
+ {
+ reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
+ url: channel.URI.spec,
+ },
+ ch => {
+ ch.replace();
+ }
+ );
+
+ let openPromise = new Promise((resolve, reject) => {
+ channel.asyncOpen({
+ onStartRequest: (request, context) => {},
+ onDataAvailable: (request, context, stream, offset, count) => {},
+ onStopRequest: (request, status) => {
+ if (status == Cr.NS_ERROR_SOCIALTRACKING_URI) {
+ Assert.ok(false, "Classifier should not cancel this channel");
+ } else {
+ // This request is supposed to fail, but we need to ensure it
+ // is not canceled by url-classifier
+ Assert.equal(
+ status,
+ Cr.NS_ERROR_UNKNOWN_HOST,
+ "Not cancel by classifier"
+ );
+ }
+ resolve();
+ },
+ });
+ });
+
+ // wait for block event from url-classifier
+ await blockPromise;
+
+ // wait for onStopRequest callback from AsyncOpen
+ await openPromise;
+
+ // clean up
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ Services.prefs.clearUserPref(FEATURE_STP_PREF);
+ httpserver.stop();
+});
+
+add_task(async function test_allow_channel() {
+ Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
+ //Services.prefs.setBoolPref("network.dns.native-is-localhost", true);
+
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ let channel = setupChannel(TRACKER_DOMAIN);
+
+ let blockPromise = waitForBeforeBlockEvent(
+ {
+ reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
+ url: channel.URI.spec,
+ },
+ ch => {
+ ch.allow();
+ }
+ );
+
+ let openPromise = new Promise((resolve, reject) => {
+ channel.asyncOpen({
+ onStartRequest: (request, context) => {},
+ onDataAvailable: (request, context, stream, offset, count) => {},
+ onStopRequest: (request, status) => {
+ if (status == Cr.NS_ERROR_SOCIALTRACKING_URI) {
+ Assert.ok(false, "Classifier should not cancel this channel");
+ } else {
+ // This request is supposed to fail, but we need to ensure it
+ // is not canceled by url-classifier
+ Assert.equal(
+ status,
+ Cr.NS_ERROR_UNKNOWN_HOST,
+ "Not cancel by classifier"
+ );
+ }
+ resolve();
+ },
+ });
+ });
+
+ // wait for block event from url-classifier
+ await blockPromise;
+
+ // wait for onStopRequest callback from AsyncOpen
+ await openPromise;
+
+ // clean up
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ Services.prefs.clearUserPref(FEATURE_STP_PREF);
+ httpserver.stop();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_dbservice.js b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
new file mode 100644
index 0000000000..70ac02021a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
@@ -0,0 +1,329 @@
+var chunk1Urls = ["test.com/aba", "test.com/foo/bar", "foo.bar.com/a/b/c"];
+var chunk1 = chunk1Urls.join("\n");
+
+var chunk2Urls = [
+ "blah.com/a",
+ "baz.com/",
+ "255.255.0.1/",
+ "www.foo.com/test2?param=1",
+];
+var chunk2 = chunk2Urls.join("\n");
+
+var chunk3Urls = ["test.com/a", "foo.bar.com/a", "blah.com/a"];
+var chunk3 = chunk3Urls.join("\n");
+
+var chunk3SubUrls = ["1:test.com/a", "1:foo.bar.com/a", "2:blah.com/a"];
+var chunk3Sub = chunk3SubUrls.join("\n");
+
+var chunk4Urls = ["a.com/b", "b.com/c"];
+var chunk4 = chunk4Urls.join("\n");
+
+var chunk5Urls = ["d.com/e", "f.com/g"];
+var chunk5 = chunk5Urls.join("\n");
+
+var chunk6Urls = ["h.com/i", "j.com/k"];
+var chunk6 = chunk6Urls.join("\n");
+
+var chunk7Urls = ["l.com/m", "n.com/o"];
+var chunk7 = chunk7Urls.join("\n");
+
+// we are going to add chunks 1, 2, 4, 5, and 6 to phish-simple,
+// chunk 2 to malware-simple, and chunk 3 to unwanted-simple,
+// and chunk 7 to block-simple.
+// Then we'll remove the urls in chunk3 from phish-simple, then
+// expire chunk 1 and chunks 4-7 from phish-simple.
+var phishExpected = {};
+var phishUnexpected = {};
+var malwareExpected = {};
+var unwantedExpected = {};
+var blockedExpected = {};
+for (let i = 0; i < chunk2Urls.length; i++) {
+ phishExpected[chunk2Urls[i]] = true;
+ malwareExpected[chunk2Urls[i]] = true;
+}
+for (let i = 0; i < chunk3Urls.length; i++) {
+ unwantedExpected[chunk3Urls[i]] = true;
+ delete phishExpected[chunk3Urls[i]];
+ phishUnexpected[chunk3Urls[i]] = true;
+}
+for (let i = 0; i < chunk1Urls.length; i++) {
+ // chunk1 urls are expired
+ phishUnexpected[chunk1Urls[i]] = true;
+}
+for (let i = 0; i < chunk4Urls.length; i++) {
+ // chunk4 urls are expired
+ phishUnexpected[chunk4Urls[i]] = true;
+}
+for (let i = 0; i < chunk5Urls.length; i++) {
+ // chunk5 urls are expired
+ phishUnexpected[chunk5Urls[i]] = true;
+}
+for (let i = 0; i < chunk6Urls.length; i++) {
+ // chunk6 urls are expired
+ phishUnexpected[chunk6Urls[i]] = true;
+}
+for (let i = 0; i < chunk7Urls.length; i++) {
+ blockedExpected[chunk7Urls[i]] = true;
+ // chunk7 urls are expired
+ phishUnexpected[chunk7Urls[i]] = true;
+}
+
+// Check that the entries hit based on sub-parts
+phishExpected["baz.com/foo/bar"] = true;
+phishExpected["foo.bar.baz.com/foo"] = true;
+phishExpected["bar.baz.com/"] = true;
+
+var numExpecting;
+
+function testFailure(arg) {
+ do_throw(arg);
+}
+
+function checkNoHost() {
+ // Looking up a no-host uri such as a data: uri should throw an exception.
+ var exception;
+ try {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("data:text/html,<b>test</b>"),
+ {}
+ );
+ dbservice.lookup(principal, allTables);
+
+ exception = false;
+ } catch (e) {
+ exception = true;
+ }
+ Assert.ok(exception);
+
+ do_test_finished();
+}
+
+function tablesCallbackWithoutSub(tables) {
+ var parts = tables.split("\n");
+ parts.sort();
+
+ // there's a leading \n here because splitting left an empty string
+ // after the trailing newline, which will sort first
+ Assert.equal(
+ parts.join("\n"),
+ "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2\ntest-unwanted-simple;a:1"
+ );
+
+ checkNoHost();
+}
+
+function expireSubSuccess(result) {
+ dbservice.getTables(tablesCallbackWithoutSub);
+}
+
+function tablesCallbackWithSub(tables) {
+ var parts = tables.split("\n");
+
+ let expectedChunks = [
+ "test-block-simple;a:1",
+ "test-malware-simple;a:1",
+ "test-phish-simple;a:2:s:3",
+ "test-unwanted-simple;a:1",
+ ];
+ for (let chunk of expectedChunks) {
+ Assert.ok(parts.includes(chunk));
+ }
+
+ // verify that expiring a sub chunk removes its name from the list
+ var data = "n:1000\ni:test-phish-simple\nsd:3\n";
+
+ doSimpleUpdate(data, expireSubSuccess, testFailure);
+}
+
+function checkChunksWithSub() {
+ dbservice.getTables(tablesCallbackWithSub);
+}
+
+function checkDone() {
+ if (--numExpecting == 0) {
+ checkChunksWithSub();
+ }
+}
+
+function phishExists(result) {
+ dumpn("phishExists: " + result);
+ try {
+ Assert.ok(result.includes("test-phish-simple"));
+ } finally {
+ checkDone();
+ }
+}
+
+function phishDoesntExist(result) {
+ dumpn("phishDoesntExist: " + result);
+ try {
+ Assert.ok(!result.includes("test-phish-simple"));
+ } finally {
+ checkDone();
+ }
+}
+
+function malwareExists(result) {
+ dumpn("malwareExists: " + result);
+
+ try {
+ Assert.ok(result.includes("test-malware-simple"));
+ } finally {
+ checkDone();
+ }
+}
+
+function unwantedExists(result) {
+ dumpn("unwantedExists: " + result);
+
+ try {
+ Assert.ok(result.includes("test-unwanted-simple"));
+ } finally {
+ checkDone();
+ }
+}
+
+function blockedExists(result) {
+ dumpn("blockedExists: " + result);
+
+ try {
+ Assert.ok(result.includes("test-block-simple"));
+ } finally {
+ checkDone();
+ }
+}
+
+function checkState() {
+ numExpecting = 0;
+
+ for (let key in phishExpected) {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + key),
+ {}
+ );
+ dbservice.lookup(principal, allTables, phishExists, true);
+ numExpecting++;
+ }
+
+ for (let key in phishUnexpected) {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + key),
+ {}
+ );
+ dbservice.lookup(principal, allTables, phishDoesntExist, true);
+ numExpecting++;
+ }
+
+ for (let key in malwareExpected) {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + key),
+ {}
+ );
+ dbservice.lookup(principal, allTables, malwareExists, true);
+ numExpecting++;
+ }
+
+ for (let key in unwantedExpected) {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + key),
+ {}
+ );
+ dbservice.lookup(principal, allTables, unwantedExists, true);
+ numExpecting++;
+ }
+
+ for (let key in blockedExpected) {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + key),
+ {}
+ );
+ dbservice.lookup(principal, allTables, blockedExists, true);
+ numExpecting++;
+ }
+}
+
+function testSubSuccess(result) {
+ Assert.equal(result, "1000");
+ checkState();
+}
+
+function do_subs() {
+ var data =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "s:3:32:" +
+ chunk3Sub.length +
+ "\n" +
+ chunk3Sub +
+ "\n" +
+ "ad:1\n" +
+ "ad:4-6\n";
+
+ doSimpleUpdate(data, testSubSuccess, testFailure);
+}
+
+function testAddSuccess(arg) {
+ Assert.equal(arg, "1000");
+
+ do_subs();
+}
+
+function do_adds() {
+ // This test relies on the fact that only -regexp tables are ungzipped,
+ // and only -hash tables are assumed to be pre-md5'd. So we use
+ // a 'simple' table type to get simple hostname-per-line semantics.
+
+ var data =
+ "n:1000\n" +
+ "i:test-phish-simple\n" +
+ "a:1:32:" +
+ chunk1.length +
+ "\n" +
+ chunk1 +
+ "\n" +
+ "a:2:32:" +
+ chunk2.length +
+ "\n" +
+ chunk2 +
+ "\n" +
+ "a:4:32:" +
+ chunk4.length +
+ "\n" +
+ chunk4 +
+ "\n" +
+ "a:5:32:" +
+ chunk5.length +
+ "\n" +
+ chunk5 +
+ "\n" +
+ "a:6:32:" +
+ chunk6.length +
+ "\n" +
+ chunk6 +
+ "\n" +
+ "i:test-malware-simple\n" +
+ "a:1:32:" +
+ chunk2.length +
+ "\n" +
+ chunk2 +
+ "\n" +
+ "i:test-unwanted-simple\n" +
+ "a:1:32:" +
+ chunk3.length +
+ "\n" +
+ chunk3 +
+ "\n" +
+ "i:test-block-simple\n" +
+ "a:1:32:" +
+ chunk7.length +
+ "\n" +
+ chunk7 +
+ "\n";
+
+ doSimpleUpdate(data, testAddSuccess, testFailure);
+}
+
+function run_test() {
+ do_adds();
+ do_test_pending();
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_digest256.js b/toolkit/components/url-classifier/tests/unit/test_digest256.js
new file mode 100644
index 0000000000..2dd5705bc2
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_digest256.js
@@ -0,0 +1,143 @@
+// Global test server for serving safebrowsing updates.
+var gHttpServ = null;
+// Global nsIUrlClassifierDBService
+var gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+
+// A map of tables to arrays of update redirect urls.
+var gTables = {};
+
+// Registers a table for which to serve update chunks. Returns a promise that
+// resolves when that chunk has been downloaded.
+function registerTableUpdate(aTable, aFilename) {
+ return new Promise(resolve => {
+ // If we haven't been given an update for this table yet, add it to the map
+ if (!(aTable in gTables)) {
+ gTables[aTable] = [];
+ }
+
+ // The number of chunks associated with this table.
+ let numChunks = gTables[aTable].length + 1;
+ let redirectPath = "/" + aTable + "-" + numChunks;
+ let redirectUrl = "localhost:4444" + redirectPath;
+
+ // Store redirect url for that table so we can return it later when we
+ // process an update request.
+ gTables[aTable].push(redirectUrl);
+
+ gHttpServ.registerPathHandler(redirectPath, function(request, response) {
+ info("Mock safebrowsing server handling request for " + redirectPath);
+ let contents = readFileToString(aFilename);
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(contents, contents.length);
+ resolve(contents);
+ });
+ });
+}
+
+// Construct a response with redirect urls.
+function processUpdateRequest() {
+ let response = "n:1000\n";
+ for (let table in gTables) {
+ response += "i:" + table + "\n";
+ for (let i = 0; i < gTables[table].length; ++i) {
+ response += "u:" + gTables[table][i] + "\n";
+ }
+ }
+ info("Returning update response: " + response);
+ return response;
+}
+
+// Set up our test server to handle update requests.
+function run_test() {
+ gHttpServ = new HttpServer();
+ gHttpServ.registerDirectory("/", do_get_cwd());
+
+ gHttpServ.registerPathHandler("/downloads", function(request, response) {
+ let blob = processUpdateRequest();
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(blob, blob.length);
+ });
+
+ gHttpServ.start(4444);
+ run_next_test();
+}
+
+// Just throw if we ever get an update or download error.
+function handleError(aEvent) {
+ do_throw("We didn't download or update correctly: " + aEvent);
+}
+
+add_test(function test_update() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Load up some update chunks for the safebrowsing server to serve.
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest1.chunk");
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest2.chunk");
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccess(aEvent) {
+ // Timeout of n:1000 is constructed in processUpdateRequest above and
+ // passed back in the callback in nsIUrlClassifierStreamUpdater on success.
+ Assert.equal("1000", aEvent);
+ info("All data processed");
+ run_next_test();
+ }
+ streamUpdater.downloadUpdates(
+ "goog-downloadwhite-digest256",
+ "goog-downloadwhite-digest256;\n",
+ true,
+ "http://localhost:4444/downloads",
+ updateSuccess,
+ handleError,
+ handleError
+ );
+});
+
+add_test(function test_url_not_whitelisted() {
+ let uri = Services.io.newURI("http://example.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ // This URI is not on any lists.
+ Assert.equal("", aEvent);
+ run_next_test();
+ }
+ );
+});
+
+add_test(function test_url_whitelisted() {
+ // Hash of "whitelisted.com/" (canonicalized URL) is:
+ // 93CA5F48E15E9861CD37C2D95DB43D23CC6E6DE5C3F8FA6E8BE66F97CC518907
+ let uri = Services.io.newURI("http://whitelisted.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ Assert.equal("goog-downloadwhite-digest256", aEvent);
+ run_next_test();
+ }
+ );
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js b/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js
new file mode 100644
index 0000000000..1b40ece856
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js
@@ -0,0 +1,285 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIUrlClassifierExceptionListService implementation. */
+
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+
+const COLLECTION_NAME = "url-classifier-skip-urls";
+const FEATURE_TRACKING_NAME = "tracking-annotation-test";
+const FEATURE_TRACKING_PREF_NAME = "urlclassifier.tracking-annotation-test";
+const FEATURE_SOCIAL_NAME = "socialtracking-annotation-test";
+const FEATURE_SOCIAL_PREF_NAME = "urlclassifier.socialtracking-annotation-test";
+const FEATURE_FINGERPRINTING_NAME = "fingerprinting-annotation-test";
+const FEATURE_FINGERPRINTING_PREF_NAME =
+ "urlclassifier.fingerprinting-annotation-test";
+
+do_get_profile();
+
+class UpdateEvent extends EventTarget {}
+function waitForEvent(element, eventName) {
+ return new Promise(function(resolve) {
+ element.addEventListener(eventName, e => resolve(e.detail), { once: true });
+ });
+}
+
+add_task(async function test_list_changes() {
+ let exceptionListService = Cc[
+ "@mozilla.org/url-classifier/exception-list-service;1"
+ ].getService(Ci.nsIUrlClassifierExceptionListService);
+
+ // Make sure we have a pref initially, since the exception list service
+ // requires it.
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "");
+
+ let updateEvent = new UpdateEvent();
+ let obs = data => {
+ let event = new CustomEvent("update", { detail: data });
+ updateEvent.dispatchEvent(event);
+ };
+
+ let records = [
+ {
+ id: "1",
+ last_modified: 1000000000000001,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "example.com",
+ },
+ ];
+
+ // Add some initial data.
+ let db = RemoteSettings(COLLECTION_NAME).db;
+ await db.importChanges({}, Date.now(), records);
+ let promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ FEATURE_TRACKING_PREF_NAME,
+ obs
+ );
+
+ Assert.equal(await promise, "", "No items in the list");
+
+ // Second event is from the RemoteSettings record.
+ let list = await waitForEvent(updateEvent, "update");
+ Assert.equal(list, "example.com", "Has one item in the list");
+
+ records.push(
+ {
+ id: "2",
+ last_modified: 1000000000000002,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "MOZILLA.ORG",
+ },
+ {
+ id: "3",
+ last_modified: 1000000000000003,
+ feature: "some-other-feature",
+ pattern: "noinclude.com",
+ },
+ {
+ last_modified: 1000000000000004,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "*.example.org",
+ }
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ await RemoteSettings(COLLECTION_NAME).emit("sync", {
+ data: { current: records },
+ });
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "test.com");
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "test.com,example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ Services.prefs.setStringPref(
+ FEATURE_TRACKING_PREF_NAME,
+ "test.com,whatever.com,*.abc.com"
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "test.com,whatever.com,*.abc.com,example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ obs
+ );
+ exceptionListService.clear();
+
+ await db.clear();
+});
+
+/**
+ * This test makes sure that when a feature registers itself with the exceptionlist service,
+ * it can get the correct initial data.
+ */
+add_task(async function test_list_init_data() {
+ let exceptionListService = Cc[
+ "@mozilla.org/url-classifier/exception-list-service;1"
+ ].getService(Ci.nsIUrlClassifierExceptionListService);
+
+ // Make sure we have a pref initially, since the exception list service
+ // requires it.
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "");
+
+ let updateEvent = new UpdateEvent();
+
+ let records = [
+ {
+ id: "1",
+ last_modified: 1000000000000001,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "tracking.example.com",
+ },
+ {
+ id: "2",
+ last_modified: 1000000000000002,
+ feature: FEATURE_SOCIAL_NAME,
+ pattern: "social.example.com",
+ },
+ {
+ id: "3",
+ last_modified: 1000000000000003,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "*.tracking.org",
+ },
+ {
+ id: "4",
+ last_modified: 1000000000000004,
+ feature: FEATURE_SOCIAL_NAME,
+ pattern: "MOZILLA.ORG",
+ },
+ ];
+
+ // Add some initial data.
+ let db = RemoteSettings(COLLECTION_NAME).db;
+ await db.importChanges({}, Date.now(), records);
+
+  // The first registered feature makes ExceptionListService get the initial data
+ // from remote setting.
+ let promise = waitForEvent(updateEvent, "update");
+
+ let obs = data => {
+ let event = new CustomEvent("update", { detail: data });
+ updateEvent.dispatchEvent(event);
+ };
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ FEATURE_TRACKING_PREF_NAME,
+ obs
+ );
+
+ let list = await promise;
+ Assert.equal(list, "", "Empty list initially");
+
+ Assert.equal(
+ await waitForEvent(updateEvent, "update"),
+ "tracking.example.com,*.tracking.org",
+ "Has several items in the list"
+ );
+
+ // Register another feature after ExceptionListService got the initial data.
+ promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_SOCIAL_NAME,
+ FEATURE_SOCIAL_PREF_NAME,
+ obs
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "social.example.com,mozilla.org",
+ "Has several items in the list"
+ );
+
+  // Test registering a feature after ExceptionListService received the synced data.
+ records.push(
+ {
+ id: "5",
+ last_modified: 1000000000000002,
+ feature: FEATURE_FINGERPRINTING_NAME,
+ pattern: "fingerprinting.example.com",
+ },
+ {
+ id: "6",
+ last_modified: 1000000000000002,
+ feature: "other-fature",
+ pattern: "not-a-fingerprinting.example.com",
+ },
+ {
+ id: "7",
+ last_modified: 1000000000000002,
+ feature: FEATURE_FINGERPRINTING_NAME,
+ pattern: "*.fingerprinting.org",
+ }
+ );
+
+ await RemoteSettings(COLLECTION_NAME).emit("sync", {
+ data: { current: records },
+ });
+
+ promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_FINGERPRINTING_NAME,
+ FEATURE_FINGERPRINTING_PREF_NAME,
+ obs
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "fingerprinting.example.com,*.fingerprinting.org",
+ "Has several items in the list"
+ );
+
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ obs
+ );
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_SOCIAL_NAME,
+ obs
+ );
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_FINGERPRINTING_NAME,
+ obs
+ );
+ exceptionListService.clear();
+
+ await db.clear();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_features.js b/toolkit/components/url-classifier/tests/unit/test_features.js
new file mode 100644
index 0000000000..088378b560
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_features.js
@@ -0,0 +1,83 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+*/
+
+"use strict";
+
// Verifies nsIURIClassifier feature lookup (getFeatureByName) and local
// classification via asyncClassifyLocalWithFeatures for tracking protection.
add_test(async _ => {
  ok(
    Services.cookies,
    "Force the cookie service to be initialized to avoid issues later. " +
      "See https://bugzilla.mozilla.org/show_bug.cgi?id=1621759#c3"
  );
  // login-reputation is only available when password protection is enabled.
  Services.prefs.setBoolPref("browser.safebrowsing.passwords.enabled", true);

  let classifier = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
    Ci.nsIURIClassifier
  );
  ok(!!classifier, "We have the URI-Classifier");

  // Known feature names must resolve; unknown ones must not.
  var tests = [
    { name: "a", expectedResult: false },
    { name: "tracking-annotation", expectedResult: true },
    { name: "tracking-protection", expectedResult: true },
    { name: "login-reputation", expectedResult: true },
  ];

  tests.forEach(test => {
    let feature;
    try {
      feature = classifier.getFeatureByName(test.name);
    } catch (e) {}

    equal(
      !!feature,
      test.expectedResult,
      "Expected result for: " + test.name
    );
    if (feature) {
      equal(feature.name, test.name, "Feature name matches");
    }
  });

  let uri = Services.io.newURI("https://example.com");

  let feature = classifier.getFeatureByName("tracking-protection");

  // Without any test entries, example.com must not be classified.
  let results = await new Promise(resolve => {
    classifier.asyncClassifyLocalWithFeatures(
      uri,
      [feature],
      Ci.nsIUrlClassifierFeature.blocklist,
      r => {
        resolve(r);
      }
    );
  });
  equal(results.length, 0, "No tracker");

  // Inject example.com into the tracking table via the test-entries pref.
  Services.prefs.setCharPref(
    "urlclassifier.trackingTable.testEntries",
    "example.com"
  );

  feature = classifier.getFeatureByName("tracking-protection");

  results = await new Promise(resolve => {
    classifier.asyncClassifyLocalWithFeatures(
      uri,
      [feature],
      Ci.nsIUrlClassifierFeature.blocklist,
      r => {
        resolve(r);
      }
    );
  });
  equal(results.length, 1, "Tracker");
  let result = results[0];
  equal(result.feature.name, "tracking-protection", "Correct feature");
  equal(result.list, "tracking-blocklist-pref", "Correct list");

  // Clean up prefs. Bug fix: the original cleared the misspelled
  // "browser.safebrowsing.password.enabled" (missing "s"), leaving the real
  // pref set; also clear the test-entries pref so later tests start clean.
  Services.prefs.clearUserPref("browser.safebrowsing.passwords.enabled");
  Services.prefs.clearUserPref("urlclassifier.trackingTable.testEntries");
  run_next_test();
});
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
new file mode 100644
index 0000000000..5837753635
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
@@ -0,0 +1,438 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test ensures that the nsIUrlClassifierHashCompleter works as expected
+// and simulates an HTTP server to provide completions.
+//
+// In order to test completions, each group of completions sent as one request
+// to the HTTP server is called a completion set. There is currently not
+// support for multiple requests being sent to the server at once, in this test.
+// This tests makes a request for each element of |completionSets|, waits for
+// a response and then moves to the next element.
+// Each element of |completionSets| is an array of completions, and each
+// completion is an object with the properties:
+// hash: complete hash for the completion. Automatically right-padded
+// to be COMPLETE_LENGTH.
+// expectCompletion: boolean indicating whether the server should respond
+// with a full hash.
+// forceServerError: boolean indicating whether the server should respond
+// with a 503.
+// table: name of the table that the hash corresponds to. Only needs to be set
+// if a completion is expected.
+// chunkId: positive integer corresponding to the chunk that the hash belongs
+// to. Only needs to be set if a completion is expected.
+// multipleCompletions: boolean indicating whether the server should respond
+// with more than one full hash. If this is set to true
+// then |expectCompletion| must also be set to true and
+// |hash| must have the same prefix as all |completions|.
+// completions: an array of completions (objects with a hash, table and
+// chunkId property as described above). This property is only
+// used when |multipleCompletions| is set to true.
+
// Basic prefixes with 2/3 completions.
var basicCompletionSet = [
  {
    // Completed to a full hash by the server (table/chunkId describe it).
    hash: "abcdefgh",
    expectCompletion: true,
    table: "test",
    chunkId: 1234,
  },
  {
    // Prefix for which the server has no full hash.
    hash: "1234",
    expectCompletion: false,
  },
  {
    // Hashes are raw bytes, not text; NUL bytes are valid hash content.
    hash: "\u0000\u0000\u000012312",
    expectCompletion: true,
    table: "test",
    chunkId: 1234,
  },
];
+
// 3 prefixes with 0 completions to test HashCompleter handling a 204 status.
var falseCompletionSet = [
  {
    hash: "1234",
    expectCompletion: false,
  },
  {
    // Empty hash: contributes an empty prefix to the request.
    hash: "",
    expectCompletion: false,
  },
  {
    // Shorter than the 4-byte prefix length.
    hash: "abc",
    expectCompletion: false,
  },
];
+
// The current implementation (as of Mar 2011) sometimes sends duplicate
// entries to HashCompleter and even expects responses for duplicated entries.
var dupedCompletionSet = [
  {
    hash: "1234",
    expectCompletion: true,
    table: "test",
    chunkId: 1,
  },
  {
    hash: "5678",
    expectCompletion: false,
    table: "test2",
    chunkId: 2,
  },
  {
    // Duplicate of the first entry: the serialized request is deduplicated
    // (see buildCompletionRequest) but each entry still gets its own callback.
    hash: "1234",
    expectCompletion: true,
    table: "test",
    chunkId: 1,
  },
  {
    hash: "5678",
    expectCompletion: false,
    table: "test2",
    chunkId: 2,
  },
];
+
// It is possible for a hash completion request to return with multiple
// completions, the HashCompleter should return all of these.
var multipleResponsesCompletionSet = [
  {
    hash: "1234",
    expectCompletion: true,
    multipleCompletions: true,
    // Both full hashes share the "1234" prefix, so one request yields two
    // completionV2 callbacks.
    completions: [
      {
        hash: "123456",
        table: "test1",
        chunkId: 3,
      },
      {
        hash: "123478",
        table: "test2",
        chunkId: 4,
      },
    ],
  },
];
+
/**
 * Serializes a completion set into the expected gethash request body:
 * "<partial_len>:<4 * prefix_count>\n<prefixes>", where each prefix is the
 * first four bytes of an entry's hash, deduplicated in first-seen order.
 */
function buildCompletionRequest(aCompletionSet) {
  const seen = new Set();
  const orderedPrefixes = [];
  for (const entry of aCompletionSet) {
    const prefix = entry.hash.substring(0, 4);
    if (!seen.has(prefix)) {
      seen.add(prefix);
      orderedPrefixes.push(prefix);
    }
  }
  return "4:" + 4 * orderedPrefixes.length + "\n" + orderedPrefixes.join("");
}
+
/**
 * Parses a gethash request body of the form
 * "[partial_length]:[payload_length]\n[prefixes_data]" into a sorted array
 * of hash prefixes, or returns null if the request is malformed.
 */
function parseCompletionRequest(aRequest) {
  // Format: [partial_length]:[num_of_prefix * partial_length]\n[prefixes_data]

  let tokens = /(\d):(\d+)/.exec(aRequest);
  // Bug fix: exec() returns null when nothing matches; the original code
  // dereferenced tokens.length and threw a TypeError instead of reporting
  // the format error and returning null as intended.
  if (!tokens || tokens.length < 3) {
    dump("Request format error.");
    return null;
  }

  let partialLength = parseInt(tokens[1], 10);

  let payloadStart =
    tokens[1].length + // partial length
    1 + // ':'
    tokens[2].length + // payload length
    1; // '\n'

  let prefixSet = [];
  for (let i = payloadStart; i < aRequest.length; i += partialLength) {
    let prefix = aRequest.substr(i, partialLength);
    if (prefix.length !== partialLength) {
      dump("Header info not correct: " + aRequest.substr(0, payloadStart));
      return null;
    }
    prefixSet.push(prefix);
  }
  prefixSet.sort();

  return prefixSet;
}
+
// Asserts that two serialized completion requests carry the same prefix set,
// ignoring the order prefixes appear in on the wire.
function compareCompletionRequest(aRequest1, aRequest2) {
  const canonical = req => JSON.stringify(parseCompletionRequest(req));
  return equal(canonical(aRequest1), canonical(aRequest2));
}
+
// The fifth completion set is added at runtime by getRandomCompletionSet.
// Each completion in the set only has one response and its purpose is to
// provide an easy way to test the HashCompleter handling an arbitrarily large
// completion set (determined by SIZE_OF_RANDOM_SET).
const SIZE_OF_RANDOM_SET = 16;

/**
 * Builds SIZE_OF_RANDOM_SET completions with random hashes and unique
 * prefixes. If |forceServerError| is set, every entry asks the server to
 * answer 503; otherwise each entry randomly expects a completion.
 * Reproducible via the seed logged below (LFSRgenerator comes from
 * head_urlclassifier.js; nextNum(n) is assumed to return an n-bit random
 * integer — confirm against the head file).
 */
function getRandomCompletionSet(forceServerError) {
  let completionSet = [];
  let hashPrefixes = [];

  let seed = Math.floor(Math.random() * Math.pow(2, 32));
  dump("Using seed of " + seed + " for random completion set.\n");
  let rand = new LFSRgenerator(seed);

  for (let i = 0; i < SIZE_OF_RANDOM_SET; i++) {
    let completion = {
      expectCompletion: false,
      forceServerError: false,
      _finished: false,
    };

    // Generate a random hash of random byte values (run_test() later pads
    // every hash to COMPLETE_LENGTH). Retry until its 4-byte prefix is
    // unique within this set.
    let hash;
    let prefix;
    do {
      hash = "";
      let length = 1 + rand.nextNum(5);
      for (let j = 0; j < length; j++) {
        hash += String.fromCharCode(rand.nextNum(8));
      }
      prefix = hash.substring(0, 4);
    } while (hashPrefixes.includes(prefix));

    hashPrefixes.push(prefix);
    completion.hash = hash;

    if (!forceServerError) {
      // Coin flip: roughly half of the entries expect a completion.
      completion.expectCompletion = rand.nextNum(1) == 1;
    } else {
      completion.forceServerError = true;
    }
    if (completion.expectCompletion) {
      // Table name: "test" plus a random base-36 rendering of a random number.
      completion.table = "test" + rand.nextNum(31).toString(36);

      completion.chunkId = rand.nextNum(16);
    }
    completionSet.push(completion);
  }

  return completionSet;
}
+
// All completion sets exercised by this test, in order. The random sets are
// appended in run_test().
var completionSets = [
  basicCompletionSet,
  falseCompletionSet,
  dupedCompletionSet,
  multipleResponsesCompletionSet,
];
var currentCompletionSet = -1; // Index of the set currently in flight.
var finishedCompletions = 0; // Finished callbacks for the current set.

const SERVER_PATH = "/hash-completer";
var server; // HttpServer instance, created in run_test().

// Completion hashes are automatically right-padded with null chars to have a
// length of COMPLETE_LENGTH.
// Taken from nsUrlClassifierDBService.h
const COMPLETE_LENGTH = 32;

var completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"].getService(
  Ci.nsIUrlClassifierHashCompleter
);

var gethashUrl; // Set in run_test() once the server port is known.

// Expected highest completion set for which the server sends a response.
var expectedMaxServerCompletionSet = 0;
var maxServerCompletionSet = 0;
+var maxServerCompletionSet = 0;
+
/**
 * Entry point: appends the runtime-generated completion sets, pads all hashes
 * to COMPLETE_LENGTH, starts the gethash server on a free port and kicks off
 * the first completion set.
 */
function run_test() {
  // This test case exercises the backoff functionality so we can't leave it disabled.
  Services.prefs.setBoolPref(
    "browser.safebrowsing.provider.test.disableBackoff",
    false
  );
  // Generate a random completion set that return successful responses.
  completionSets.push(getRandomCompletionSet(false));
  // We backoff after receiving an error, so requests shouldn't reach the
  // server after that.
  expectedMaxServerCompletionSet = completionSets.length;
  // Generate some completion sets that return 503s.
  for (let j = 0; j < 10; ++j) {
    completionSets.push(getRandomCompletionSet(true));
  }

  // Fix up the completions before running the test.
  for (let completionSet of completionSets) {
    for (let completion of completionSet) {
      // Pad the right of each |hash| so that the length is COMPLETE_LENGTH.
      if (completion.multipleCompletions) {
        for (let responseCompletion of completion.completions) {
          let numChars = COMPLETE_LENGTH - responseCompletion.hash.length;
          responseCompletion.hash += new Array(numChars + 1).join("\u0000");
        }
      } else {
        let numChars = COMPLETE_LENGTH - completion.hash.length;
        completion.hash += new Array(numChars + 1).join("\u0000");
      }
    }
  }
  do_test_pending();

  server = new HttpServer();
  server.registerPathHandler(SERVER_PATH, hashCompleterServer);

  // -1 lets the server pick any free port.
  server.start(-1);
  const SERVER_PORT = server.identity.primaryPort;

  gethashUrl = "http://localhost:" + SERVER_PORT + SERVER_PATH;

  runNextCompletion();
}
+
/**
 * Advances to the next completion set and issues one complete() call per
 * entry; calls finish() once every set has run.
 */
function runNextCompletion() {
  // The server relies on currentCompletionSet to send the correct response, so
  // don't increment it until we start the new set of callbacks.
  currentCompletionSet++;
  if (currentCompletionSet >= completionSets.length) {
    finish();
    return;
  }

  dump(
    "Now on completion set index " +
      currentCompletionSet +
      ", length " +
      completionSets[currentCompletionSet].length +
      "\n"
  );
  // Number of finished completions for this set.
  finishedCompletions = 0;
  for (let completion of completionSets[currentCompletionSet]) {
    completer.complete(
      completion.hash.substring(0, 4),
      gethashUrl,
      "test-phish-shavar", // Could be arbitrary v2 table name.
      new callback(completion)
    );
  }
}
+
/**
 * Path handler for SERVER_PATH. Verifies the request body matches the
 * serialized prefix set expected for the current completion set, then replies
 * with the configured full hashes, a 204 (nothing to complete) or a 503
 * (forced error).
 */
function hashCompleterServer(aRequest, aResponse) {
  let stream = aRequest.bodyInputStream;
  let wrapperStream = Cc["@mozilla.org/binaryinputstream;1"].createInstance(
    Ci.nsIBinaryInputStream
  );
  wrapperStream.setInputStream(stream);

  let len = stream.available();
  let data = wrapperStream.readBytes(len);

  // Check if we got the expected completion request.
  let expectedRequest = buildCompletionRequest(
    completionSets[currentCompletionSet]
  );
  compareCompletionRequest(data, expectedRequest);

  // To avoid a response with duplicate hash completions, we keep track of all
  // completed hash prefixes so far.
  let completedHashes = [];
  let responseText = "";

  // Serializes one completion as "<table>:<chunkId>:<len>\n<full hash>".
  function responseForCompletion(x) {
    return x.table + ":" + x.chunkId + ":" + x.hash.length + "\n" + x.hash;
  }
  // As per the spec, a server should response with a 204 if there are no
  // full-length hashes that match the prefixes.
  let httpStatus = 204;
  for (let completion of completionSets[currentCompletionSet]) {
    if (
      completion.expectCompletion &&
      !completedHashes.includes(completion.hash)
    ) {
      completedHashes.push(completion.hash);

      if (completion.multipleCompletions) {
        responseText += completion.completions
          .map(responseForCompletion)
          .join("");
      } else {
        responseText += responseForCompletion(completion);
      }
    }
    // One forced error makes the whole response a 503.
    if (completion.forceServerError) {
      httpStatus = 503;
    }
  }

  dump("Server sending response for " + currentCompletionSet + "\n");
  maxServerCompletionSet = currentCompletionSet;
  if (responseText && httpStatus != 503) {
    aResponse.write(responseText);
  } else {
    aResponse.setStatusLine(null, httpStatus, null);
  }
}
+
/**
 * nsIUrlClassifierHashCompleterCallback implementation bound to a single
 * completion entry; records whether the expected completion arrived.
 * @param {Object} completion - one entry of the current completion set.
 */
function callback(completion) {
  this._completion = completion;
}

callback.prototype = {
  // Invoked once per full hash the server returned for our prefix.
  completionV2: function completionV2(hash, table, chunkId, trusted) {
    Assert.ok(this._completion.expectCompletion);
    if (this._completion.multipleCompletions) {
      // Mark the matching sub-completion; we are done when all of them
      // have been seen.
      for (let completion of this._completion.completions) {
        if (completion.hash == hash) {
          Assert.equal(JSON.stringify(hash), JSON.stringify(completion.hash));
          Assert.equal(table, completion.table);
          Assert.equal(chunkId, completion.chunkId);

          completion._completed = true;

          if (this._completion.completions.every(x => x._completed)) {
            this._completed = true;
          }

          break;
        }
      }
    } else {
      // Hashes are not actually strings and can contain arbitrary data.
      Assert.equal(JSON.stringify(hash), JSON.stringify(this._completion.hash));
      Assert.equal(table, this._completion.table);
      Assert.equal(chunkId, this._completion.chunkId);

      this._completed = true;
    }
  },

  // Invoked exactly once per complete() call, after any completionV2 calls.
  completionFinished: function completionFinished(status) {
    finishedCompletions++;
    Assert.equal(!!this._completion.expectCompletion, !!this._completed);
    this._completion._finished = true;

    // currentCompletionSet can mutate before all of the callbacks are complete.
    if (
      currentCompletionSet < completionSets.length &&
      finishedCompletions == completionSets[currentCompletionSet].length
    ) {
      runNextCompletion();
    }
  },
};
+
/**
 * Final teardown: restores the backoff pref, verifies requests stopped
 * reaching the server once backoff kicked in, and stops the HTTP server.
 */
function finish() {
  Services.prefs.clearUserPref(
    "browser.safebrowsing.provider.test.disableBackoff"
  );

  Assert.equal(expectedMaxServerCompletionSet, maxServerCompletionSet);
  server.stop(function() {
    do_test_finished();
  });
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
new file mode 100644
index 0000000000..7eb23cb3b0
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
@@ -0,0 +1,292 @@
+const { XPCOMUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/XPCOMUtils.sys.mjs"
+);
+
+// These tables have a different update URL (for v4).
+const TEST_TABLE_DATA_V4 = {
+ tableName: "test-phish-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4?",
+};
+
+const PREF_NEXTUPDATETIME_V4 =
+ "browser.safebrowsing.provider.google4.nextupdatetime";
+const GETHASH_PATH = "/safebrowsing/gethash-v4";
+
+// The protobuf binary represention of gethash response:
+// minimumWaitDuration : 12 secs 10 nanosecs
+// negativeCacheDuration : 120 secs 9 nanosecs
+//
+// { CompleteHash, ThreatType, CacheDuration { secs, nanos } };
+// { nsCString("01234567890123456789012345678901"), SOCIAL_ENGINEERING_PUBLIC, { 8, 500 } },
+// { nsCString("12345678901234567890123456789012"), SOCIAL_ENGINEERING_PUBLIC, { 7, 100} },
+// { nsCString("23456789012345678901234567890123"), SOCIAL_ENGINEERING_PUBLIC, { 1, 20 } },
+
+const GETHASH_RESPONSE_CONTENT =
+ "\x0A\x2D\x08\x02\x1A\x22\x0A\x20\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x2A\x05\x08\x08\x10\xF4\x03\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x2A\x04\x08\x07\x10\x64\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x2A\x04\x08\x01\x10\x14\x12\x04\x08\x0C\x10\x0A\x1A\x04\x08\x78\x10\x09";
+
+// The protobuf binary represention of update response:
+//
+// [
+// {
+// 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
+// 'response_type': 2, // FULL_UPDATE
+// 'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
+// 'checksum': { "sha256": CHECKSUM }, // CHECKSUM
+// 'additions': { 'compression_type': RAW,
+// 'prefix_size': 4,
+// 'raw_hashes': "00000001000000020000000300000004"}
+// }
+// ]
+//
+const UPDATE_RESPONSE_CONTENT =
+ "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
+const UPDATE_PATH = "/safebrowsing/update";
+
+let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"].getService(
+ Ci.nsIUrlListManager
+);
+
+let gCompleter = Cc["@mozilla.org/url-classifier/hashcompleter;1"].getService(
+ Ci.nsIUrlClassifierHashCompleter
+);
+
+XPCOMUtils.defineLazyServiceGetter(
+ this,
+ "gUrlUtil",
+ "@mozilla.org/url-classifier/utils;1",
+ "nsIUrlClassifierUtils"
+);
+
+// Handles request for TEST_TABLE_DATA_V4.
+let gHttpServV4 = null;
+
+const NEW_CLIENT_STATE = "sta\0te";
+const CHECKSUM =
+ "\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78";
+
+Services.prefs.setBoolPref("browser.safebrowsing.debug", true);
+
+// The "\xFF\xFF" is to generate a base64 string with "/".
+Services.prefs.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
+
+// Register tables.
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4.tableName,
+ TEST_TABLE_DATA_V4.providerName,
+ TEST_TABLE_DATA_V4.updateUrl,
+ TEST_TABLE_DATA_V4.gethashUrl
+);
+
// This is unfortunately needed since v4 gethash request
// requires the threat type (table name) as well as the
// state it's associated with. We have to run the update once
// to have the state written.
add_test(function test_update_v4() {
  gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
  gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);

  // Force table update. run_next_test() is invoked from the update handler
  // (see registerHandlerUpdateV4) once the new metadata has been saved.
  Services.prefs.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
  gListManager.maybeToggleUpdateChecking();
});
+
// Issues three completions in parallel: "0123" and "1234567" have full-hash
// matches in GETHASH_RESPONSE_CONTENT, "1111" has none. The gethash handler
// expects a single request covering all three sorted prefixes, and
// run_next_test() fires only after all three completionFinished callbacks.
add_test(function test_getHashRequestV4() {
  let request = gUrlUtil.makeFindFullHashRequestV4(
    [TEST_TABLE_DATA_V4.tableName],
    [btoa(NEW_CLIENT_STATE)],
    [btoa("0123"), btoa("1234567"), btoa("1111")].sort()
  );
  registerHandlerGethashV4("&$req=" + request);
  let completeFinishedCnt = 0;

  gCompleter.complete(
    "0123",
    TEST_TABLE_DATA_V4.gethashUrl,
    TEST_TABLE_DATA_V4.tableName,
    {
      completionV4(hash, table, duration, fullhashes) {
        equal(hash, "0123");
        equal(table, TEST_TABLE_DATA_V4.tableName);
        // negativeCacheDuration from the canned response.
        equal(duration, 120);
        equal(fullhashes.length, 1);

        let match = fullhashes
          .QueryInterface(Ci.nsIArray)
          .queryElementAt(0, Ci.nsIFullHashMatch);

        equal(match.fullHash, "01234567890123456789012345678901");
        equal(match.cacheDuration, 8);
        info("completion: " + match.fullHash + ", " + table);
      },

      completionFinished(status) {
        equal(status, Cr.NS_OK);
        completeFinishedCnt++;
        if (3 === completeFinishedCnt) {
          run_next_test();
        }
      },
    }
  );

  gCompleter.complete(
    "1234567",
    TEST_TABLE_DATA_V4.gethashUrl,
    TEST_TABLE_DATA_V4.tableName,
    {
      completionV4(hash, table, duration, fullhashes) {
        equal(hash, "1234567");
        equal(table, TEST_TABLE_DATA_V4.tableName);
        equal(duration, 120);
        equal(fullhashes.length, 1);

        let match = fullhashes
          .QueryInterface(Ci.nsIArray)
          .queryElementAt(0, Ci.nsIFullHashMatch);

        equal(match.fullHash, "12345678901234567890123456789012");
        equal(match.cacheDuration, 7);
        info("completion: " + match.fullHash + ", " + table);
      },

      completionFinished(status) {
        equal(status, Cr.NS_OK);
        completeFinishedCnt++;
        if (3 === completeFinishedCnt) {
          run_next_test();
        }
      },
    }
  );

  gCompleter.complete(
    "1111",
    TEST_TABLE_DATA_V4.gethashUrl,
    TEST_TABLE_DATA_V4.tableName,
    {
      completionV4(hash, table, duration, fullhashes) {
        equal(hash, "1111");
        equal(table, TEST_TABLE_DATA_V4.tableName);
        equal(duration, 120);
        // No full hash matches this prefix.
        equal(fullhashes.length, 0);
      },

      completionFinished(status) {
        equal(status, Cr.NS_OK);
        completeFinishedCnt++;
        if (3 === completeFinishedCnt) {
          run_next_test();
        }
      },
    }
  );
});
+
// Verifies the v4 minimum wait duration: the previous gethash response
// carried minimumWaitDuration of ~12 seconds, so completions issued before
// it elapses must abort and one issued after must succeed.
add_test(function test_minWaitDuration() {
  // Completion attempted during the wait window: expected to abort.
  let failedComplete = function() {
    gCompleter.complete(
      "0123",
      TEST_TABLE_DATA_V4.gethashUrl,
      TEST_TABLE_DATA_V4.tableName,
      {
        completionFinished(status) {
          equal(status, Cr.NS_ERROR_ABORT);
        },
      }
    );
  };

  // Completion attempted after the wait window: expected to succeed.
  let successComplete = function() {
    gCompleter.complete(
      "1234567",
      TEST_TABLE_DATA_V4.gethashUrl,
      TEST_TABLE_DATA_V4.tableName,
      {
        completionV4(hash, table, duration, fullhashes) {
          equal(hash, "1234567");
          equal(table, TEST_TABLE_DATA_V4.tableName);
          equal(fullhashes.length, 1);

          let match = fullhashes
            .QueryInterface(Ci.nsIArray)
            .queryElementAt(0, Ci.nsIFullHashMatch);

          equal(match.fullHash, "12345678901234567890123456789012");
          equal(match.cacheDuration, 7);
          info("completion: " + match.fullHash + ", " + table);
        },

        completionFinished(status) {
          equal(status, Cr.NS_OK);
          run_next_test();
        },
      }
    );
  };

  let request = gUrlUtil.makeFindFullHashRequestV4(
    [TEST_TABLE_DATA_V4.tableName],
    [btoa(NEW_CLIENT_STATE)],
    [btoa("1234567")]
  );
  registerHandlerGethashV4("&$req=" + request);

  // The last gethash response contained a min wait duration 12 secs 10 nano
  // So subsequent requests can happen only after the min wait duration
  do_timeout(1000, failedComplete);
  do_timeout(2000, failedComplete);
  do_timeout(4000, failedComplete);
  do_timeout(13000, successComplete);
});
+
/**
 * (Re)registers the v4 gethash handler: asserts the query string matches
 * |aExpectedQuery| and replies with the canned GETHASH_RESPONSE_CONTENT.
 */
function registerHandlerGethashV4(aExpectedQuery) {
  // Drop any previously registered handler before installing the new one.
  gHttpServV4.registerPathHandler(GETHASH_PATH, null);
  // V4 gethash handler.
  gHttpServV4.registerPathHandler(GETHASH_PATH, function(request, response) {
    equal(request.queryString, aExpectedQuery);

    response.setStatusLine(request.httpVersion, 200, "OK");
    response.bodyOutputStream.write(
      GETHASH_RESPONSE_CONTENT,
      GETHASH_RESPONSE_CONTENT.length
    );
  });
}
+
/**
 * Registers the v4 update handler. It replies with UPDATE_RESPONSE_CONTENT
 * and advances to the next test once the new client state and checksum have
 * been persisted.
 */
function registerHandlerUpdateV4() {
  // Update handler. Will respond a valid state to be verified in the
  // gethash handler.
  gHttpServV4.registerPathHandler(UPDATE_PATH, function(request, response) {
    response.setHeader(
      "Content-Type",
      "application/vnd.google.safebrowsing-update",
      false
    );
    response.setStatusLine(request.httpVersion, 200, "OK");
    response.bodyOutputStream.write(
      UPDATE_RESPONSE_CONTENT,
      UPDATE_RESPONSE_CONTENT.length
    );

    // waitUntilMetaDataSaved is provided by head_urlclassifier.js.
    waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
      run_next_test();
    });
  });
}
+
/**
 * Entry point: starts the v4 test server on port 5555 (the port hard-coded
 * in TEST_TABLE_DATA_V4 URLs), installs the update handler and begins the
 * test sequence.
 */
function run_test() {
  throwOnUpdateErrors();

  gHttpServV4 = new HttpServer();
  gHttpServV4.registerDirectory("/", do_get_cwd());

  registerHandlerUpdateV4();
  gHttpServV4.start(5555);
  run_next_test();
}
+
+registerCleanupFunction(function() {
+ stopThrowingOnUpdateErrors();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_listmanager.js b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
new file mode 100644
index 0000000000..485c109bcd
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
@@ -0,0 +1,355 @@
+// These tables share the same updateURL.
+const TEST_TABLE_DATA_LIST = [
+ // 0:
+ {
+ tableName: "test-listmanager0-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash0",
+ },
+
+ // 1:
+ {
+ tableName: "test-listmanager1-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash1",
+ },
+
+ // 2.
+ {
+ tableName: "test-listmanager2-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash2",
+ },
+];
+
+// These tables have a different update URL (for v4).
+const TEST_TABLE_DATA_V4 = {
+ tableName: "test-phish-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+const TEST_TABLE_DATA_V4_DISABLED = {
+ tableName: "test-unwanted-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+
+const PREF_NEXTUPDATETIME =
+ "browser.safebrowsing.provider.google.nextupdatetime";
+const PREF_NEXTUPDATETIME_V4 =
+ "browser.safebrowsing.provider.google4.nextupdatetime";
+
+let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"].getService(
+ Ci.nsIUrlListManager
+);
+
+let gUrlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+);
+
+// Global test server for serving safebrowsing updates.
+let gHttpServ = null;
+let gUpdateResponse = "";
+let gExpectedUpdateRequest = "";
+let gExpectedQueryV4 = "";
+
+// Handles request for TEST_TABLE_DATA_V4.
+let gHttpServV4 = null;
+
+// These two variables are used to synchronize the last two racing updates
+// (in terms of "update URL") in test_update_all_tables().
+let gUpdatedCntForTableData = 0; // For TEST_TABLE_DATA_LIST.
+let gIsV4Updated = false; // For TEST_TABLE_DATA_V4.
+
+const NEW_CLIENT_STATE = "sta\0te";
+const CHECKSUM =
+ "\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78";
+
+Services.prefs.setBoolPref("browser.safebrowsing.debug", true);
+
+// The "\xFF\xFF" is to generate a base64 string with "/".
+Services.prefs.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
+
+// Register tables.
+TEST_TABLE_DATA_LIST.forEach(function(t) {
+ gListManager.registerTable(
+ t.tableName,
+ t.providerName,
+ t.updateUrl,
+ t.gethashUrl
+ );
+});
+
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4.tableName,
+ TEST_TABLE_DATA_V4.providerName,
+ TEST_TABLE_DATA_V4.updateUrl,
+ TEST_TABLE_DATA_V4.gethashUrl
+);
+
+// To test Bug 1302044.
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4_DISABLED.tableName,
+ TEST_TABLE_DATA_V4_DISABLED.providerName,
+ TEST_TABLE_DATA_V4_DISABLED.updateUrl,
+ TEST_TABLE_DATA_V4_DISABLED.gethashUrl
+);
+
// Test cases that require the HTTP servers; each prepares the expected
// request/response globals and then forces an update.
const SERVER_INVOLVED_TEST_CASE_LIST = [
  // - Do table0 update.
  // - Server would respond "a:5:32:32\n[DATA]".
  function test_update_table0() {
    disableAllUpdates();

    gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
    gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";\n";

    gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
    gUpdateResponse += readFileToString("data/digest2.chunk");

    forceTableUpdate();
  },

  // - Do table0 update again. Since chunk 5 was added to table0 in the last
  //   update, the expected request contains "a:5".
  // - Server would respond "s;2-12\n[DATA]".
  function test_update_table0_with_existing_chunks() {
    disableAllUpdates();

    gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
    gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5\n";

    gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
    gUpdateResponse += readFileToString("data/digest1.chunk");

    forceTableUpdate();
  },

  // - Do all-table update.
  // - Server would respond no chunk control.
  //
  // Note that this test MUST be the last one in the array since we rely on
  // the number of server-involved test cases to synchronize the racing last
  // two updates for different URL.
  function test_update_all_tables() {
    disableAllUpdates();

    // Enable all tables including TEST_TABLE_DATA_V4!
    TEST_TABLE_DATA_LIST.forEach(function(t) {
      gListManager.enableUpdate(t.tableName);
    });

    // We register two v4 tables but only enable one of them
    // to verify that the disabled tables are not updated.
    // See Bug 1302044.
    gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
    gListManager.disableUpdate(TEST_TABLE_DATA_V4_DISABLED.tableName);

    // Expected results for v2.
    gExpectedUpdateRequest =
      TEST_TABLE_DATA_LIST[0].tableName +
      ";a:5:s:2-12\n" +
      TEST_TABLE_DATA_LIST[1].tableName +
      ";\n" +
      TEST_TABLE_DATA_LIST[2].tableName +
      ";\n";
    gUpdateResponse = "n:1000\n";

    // We test the request against the query string since the v4 request
    // would be appended to the query string. The request is generated
    // by protobuf API (binary) then encoded to base64 format.
    let requestV4 = gUrlUtils.makeUpdateRequestV4(
      [TEST_TABLE_DATA_V4.tableName],
      [""]
    );
    gExpectedQueryV4 = "&$req=" + requestV4;

    forceTableUpdate();
  },
];
+
+SERVER_INVOLVED_TEST_CASE_LIST.forEach(t => add_test(t));
+
// After test_update_all_tables has stored NEW_CLIENT_STATE, a subsequent v4
// update must send that state (i.e. a partial update) to the server.
add_test(function test_partialUpdateV4() {
  disableAllUpdates();

  gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);

  // Since the new client state has been responded and saved in
  // test_update_all_tables, this update request should send
  // a partial update to the server.
  let requestV4 = gUrlUtils.makeUpdateRequestV4(
    [TEST_TABLE_DATA_V4.tableName],
    [btoa(NEW_CLIENT_STATE)]
  );
  gExpectedQueryV4 = "&$req=" + requestV4;

  forceTableUpdate();
});
+
// Tests nsIUrlListManager.getGethashUrl: every registered table must report
// the gethash URL it was registered with.
add_test(function test_getGethashUrl() {
  TEST_TABLE_DATA_LIST.forEach(function(t) {
    equal(gListManager.getGethashUrl(t.tableName), t.gethashUrl);
  });
  equal(
    gListManager.getGethashUrl(TEST_TABLE_DATA_V4.tableName),
    TEST_TABLE_DATA_V4.gethashUrl
  );
  run_next_test();
});
+
/**
 * Entry point: starts the v2 server (port 4444) and the v4 server
 * (port 5555), wires both update handlers to the expectations prepared by
 * each test case, then runs the first test. The counters
 * gUpdatedCntForTableData / gIsV4Updated synchronize the two racing updates
 * issued by test_update_all_tables.
 */
function run_test() {
  // Setup primary testing server.
  gHttpServ = new HttpServer();
  gHttpServ.registerDirectory("/", do_get_cwd());

  gHttpServ.registerPathHandler("/safebrowsing/update", function(
    request,
    response
  ) {
    let body = NetUtil.readInputStreamToString(
      request.bodyInputStream,
      request.bodyInputStream.available()
    );

    // Verify if the request is as expected.
    equal(body, gExpectedUpdateRequest);

    // Respond the update which is controlled by the test case.
    response.setHeader(
      "Content-Type",
      "application/vnd.google.safebrowsing-update",
      false
    );
    response.setStatusLine(request.httpVersion, 200, "OK");
    response.bodyOutputStream.write(gUpdateResponse, gUpdateResponse.length);

    gUpdatedCntForTableData++;

    if (gUpdatedCntForTableData !== SERVER_INVOLVED_TEST_CASE_LIST.length) {
      // This is not the last test case so run the next once upon the
      // the update success.
      waitForUpdateSuccess(run_next_test);
      return;
    }

    if (gIsV4Updated) {
      run_next_test(); // All tests are done. Just finish.
      return;
    }

    info("Waiting for TEST_TABLE_DATA_V4 to be tested ...");
  });

  gHttpServ.start(4444);

  // Setup v4 testing server for the different update URL.
  gHttpServV4 = new HttpServer();
  gHttpServV4.registerDirectory("/", do_get_cwd());

  gHttpServV4.registerPathHandler("/safebrowsing/update", function(
    request,
    response
  ) {
    // V4 update request body should be empty.
    equal(request.bodyInputStream.available(), 0);

    // Not on the spec. Found in Chromium source code...
    equal(request.getHeader("X-HTTP-Method-Override"), "POST");

    // V4 update request uses GET.
    equal(request.method, "GET");

    // V4 append the base64 encoded request to the query string.
    equal(request.queryString, gExpectedQueryV4);
    equal(request.queryString.indexOf("+"), -1);
    equal(request.queryString.indexOf("/"), -1);

    // Respond a V2 compatible content for now. In the future we can
    // send a meaningful response to test Bug 1284178 to see if the
    // update is successfully stored to database.
    response.setHeader(
      "Content-Type",
      "application/vnd.google.safebrowsing-update",
      false
    );
    response.setStatusLine(request.httpVersion, 200, "OK");

    // The protobuf binary representation of response:
    //
    // [
    //   {
    //     'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
    //     'response_type': 2, // FULL_UPDATE
    //     'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
    //     'checksum': { "sha256": CHECKSUM }, // CHECKSUM
    //     'additions': { 'compression_type': RAW,
    //                    'prefix_size': 4,
    //                    'raw_hashes': "00000001000000020000000300000004"}
    //   }
    // ]
    //
    let content =
      "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";

    response.bodyOutputStream.write(content, content.length);

    if (gIsV4Updated) {
      // This falls to the case where test_partialUpdateV4 is running.
      // We are supposed to have verified the update request contains
      // the state we set in the previous request.
      waitForUpdateSuccess(run_next_test);
      return;
    }

    waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
      gIsV4Updated = true;

      if (gUpdatedCntForTableData === SERVER_INVOLVED_TEST_CASE_LIST.length) {
        // All tests are done!
        run_next_test();
        return;
      }

      info("Wait for all sever-involved tests to be done ...");
    });
  });

  gHttpServV4.start(5555);

  registerCleanupFunction(function() {
    return (async function() {
      await Promise.all([gHttpServ.stop(), gHttpServV4.stop()]);
    })();
  });

  run_next_test();
}
+
// A trick to force updating tables. However, before calling this, we have to
// call disableAllUpdates() first to clean up the updateCheckers in listmanager.
function forceTableUpdate() {
  throwOnUpdateErrors();
  // Pretend both providers are overdue so the next check fires immediately.
  Services.prefs.setCharPref(PREF_NEXTUPDATETIME, "1");
  Services.prefs.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
  gListManager.maybeToggleUpdateChecking();
}
+
// Disables updates for every registered table (v2 and v4) and stops treating
// update errors as fatal.
function disableAllUpdates() {
  stopThrowingOnUpdateErrors();
  TEST_TABLE_DATA_LIST.forEach(t => gListManager.disableUpdate(t.tableName));
  gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
}
+
// Invokes |callback| once after the next "safebrowsing-update-finished"
// notification, removing the observer first.
function waitForUpdateSuccess(callback) {
  Services.obs.addObserver(function listener() {
    Services.obs.removeObserver(listener, "safebrowsing-update-finished");
    callback();
  }, "safebrowsing-update-finished");
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js b/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js
new file mode 100644
index 0000000000..95a8f1a8e8
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js
@@ -0,0 +1,7 @@
// Ensure that the default value of malwareTable is always in sorted order
const malwareTablePref = Services.prefs.getCharPref(
  "urlclassifier.malwareTable"
);
const sortedTables = malwareTablePref.split(",").sort().join(",");
Assert.equal(malwareTablePref, sortedTables);
diff --git a/toolkit/components/url-classifier/tests/unit/test_partial.js b/toolkit/components/url-classifier/tests/unit/test_partial.js
new file mode 100644
index 0000000000..fe752919a7
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_partial.js
@@ -0,0 +1,611 @@
/**
 * DummyCompleter() lets tests easily specify the results of a partial
 * hash completion request.
 */
function DummyCompleter() {
  // Default table; tests override this via setupCompleter().
  this.tableName = "test-phish-simple";
  // Every partial hash the dbservice asked us to complete, in order.
  this.queries = [];
  // Map of 4-byte hash prefix -> array of [chunkId, fullHash] pairs.
  this.fragments = {};
}
+
DummyCompleter.prototype = {
  QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierHashCompleter"]),

  // Record the query, then asynchronously report any canned completions for
  // |partialHash| and signal completion (or failure when |alwaysFail| is set).
  complete(partialHash, gethashUrl, tableName, cb) {
    this.queries.push(partialHash);
    executeSoon(() => {
      if (this.alwaysFail) {
        cb.completionFinished(Cr.NS_ERROR_FAILURE);
        return;
      }
      const matches = this.fragments[partialHash];
      if (matches) {
        for (const [chunkId, hash] of matches) {
          cb.completionV2(hash, this.tableName, chunkId);
        }
      }
      cb.completionFinished(0);
    });
  },

  // SHA-256 of |fragment| (UTF-8 encoded), truncated to 32 bytes, as a raw
  // binary string.
  getHash(fragment) {
    const converter = Cc[
      "@mozilla.org/intl/scriptableunicodeconverter"
    ].createInstance(Ci.nsIScriptableUnicodeConverter);
    converter.charset = "UTF-8";
    const bytes = converter.convertToByteArray(fragment);
    const hasher = Cc["@mozilla.org/security/hash;1"].createInstance(
      Ci.nsICryptoHash
    );
    hasher.init(hasher.SHA256);
    hasher.update(bytes, bytes.length);
    return hasher.finish(false).slice(0, 32);
  },

  addFragment(chunkId, fragment) {
    this.addHash(chunkId, this.getHash(fragment));
  },

  // This method allows the caller to generate complete hashes that match the
  // prefix of a real fragment, but have different complete hashes.
  addConflict(chunkId, fragment) {
    const realHash = this.getHash(fragment);
    const bogusHash = this.getHash("blah blah blah blah blah");
    this.addHash(chunkId, realHash.slice(0, 4) + bogusHash.slice(4, 32));
  },

  // File |hash| under its 4-byte prefix so complete() can find it later.
  addHash(chunkId, hash) {
    const prefix = hash.slice(0, 4);
    if (!this.fragments[prefix]) {
      this.fragments[prefix] = [];
    }
    this.fragments[prefix].push([chunkId, hash]);
  },

  // Assert that the set of queries we received equals the set of 4-byte
  // prefixes derived from |fragments| (order-insensitive).
  compareQueries(fragments) {
    const expectedQueries = fragments.map(f => this.getHash(f).slice(0, 4));
    Assert.equal(this.queries.length, expectedQueries.length);
    expectedQueries.sort();
    this.queries.sort();
    for (let i = 0; i < this.queries.length; i++) {
      Assert.equal(this.queries[i], expectedQueries[i]);
    }
  },
};
+
// Build a DummyCompleter for |table|, seed it with real completions (|hits|)
// and prefix-colliding fakes (|conflicts|), and register it with dbservice.
// Both lists are arrays of [chunkId, [fragment, ...]] pairs.
function setupCompleter(table, hits, conflicts) {
  const completer = new DummyCompleter();
  completer.tableName = table;

  for (const [chunkId, fragments] of hits) {
    fragments.forEach(fragment => completer.addFragment(chunkId, fragment));
  }
  for (const [chunkId, fragments] of conflicts) {
    fragments.forEach(fragment => completer.addConflict(chunkId, fragment));
  }

  dbservice.setHashCompleter(table, completer);
  return completer;
}
+
// Convenience alias: install a completer for |table| with the given canned
// completions and prefix-only conflicts.
function installCompleter(table, fragments, conflictFragments) {
  return setupCompleter(table, fragments, conflictFragments);
}
+
// Install a completer for |table| whose every request reports failure.
function installFailingCompleter(table) {
  const completer = setupCompleter(table, [], []);
  completer.alwaysFail = true;
  return completer;
}
+
// Helper assertion for checking dummy completer queries.
// |data| is [completer, expectedFragments].
gAssertions.completerQueried = function(data, cb) {
  const [completer, expectedFragments] = data;
  completer.compareQueries(expectedFragments);
  cb();
};
+
// Run one update/assertion cycle: advance to the next test on success,
// abort via updateError on failure.
function doTest(updates, assertions) {
  doUpdateTest(updates, assertions, runNextTest, updateError);
}
+
// Test an add of two partial urls to a fresh database
function testPartialAdds() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  // 4-byte (partial) hashes force a completer round-trip on lookup.
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// Like testPartialAdds, but every completion also has a same-prefix conflict.
function testPartialAddsWithConflicts() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);

  // Each result will have both a real match and a conflict
  const completer = installCompleter(
    "test-phish-simple",
    [[1, addUrls]],
    [[1, addUrls]]
  );

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// Test whether the fragmenting code does not cause duplicated completions
function testFragments() {
  const addUrls = ["foo.com/a/b/c", "foo.net/", "foo.com/c/"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
// section 6.2 example 1
function testSpecFragments() {
  const probeUrls = ["a.b.c/1/2.html?param=1"];

  // Every fragment the spec says the probe URL expands to.
  const addUrls = [
    "a.b.c/1/2.html",
    "a.b.c/",
    "a.b.c/1/",
    "b.c/1/2.html?param=1",
    "b.c/1/2.html",
    "b.c/",
    "b.c/1/",
    "a.b.c/1/2.html?param=1",
  ];

  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: probeUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
// section 6.2 example 2
function testMoreSpecFragments() {
  const probeUrls = ["a.b.c.d.e.f.g/1.html"];

  // Hostname fragments are capped at the 4 most-specific subdomains plus the
  // full host, per the spec.
  const addUrls = [
    "a.b.c.d.e.f.g/1.html",
    "a.b.c.d.e.f.g/",
    "c.d.e.f.g/1.html",
    "c.d.e.f.g/",
    "d.e.f.g/1.html",
    "d.e.f.g/",
    "e.f.g/1.html",
    "e.f.g/",
    "f.g/1.html",
    "f.g/",
  ];

  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: probeUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// A prefix hit whose only completions conflict must NOT produce a match.
function testFalsePositives() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);

  // Each result will have no matching complete hashes and a non-matching
  // conflict
  const completer = installCompleter("test-phish-simple", [], [[1, addUrls]]);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsDontExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// A completer with no data means partial adds never turn into real matches.
function testEmptyCompleter() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);

  // Completer will never return full hashes
  const completer = installCompleter("test-phish-simple", [], []);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsDontExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// A completer that always fails its requests must also yield no matches.
function testCompleterFailure() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);

  // Completer will never return full hashes
  const completer = installFailingCompleter("test-phish-simple");

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsDontExist: addUrls,
    completerQueried: [completer, addUrls],
  });
}
+
// Partial (4-byte) and complete (32-byte) adds on the same domain: only the
// partial entry should ever reach the completer.
function testMixedSizesSameDomain() {
  const add1Urls = ["foo.com/a"];
  const add2Urls = ["foo.com/b"];

  const update1 = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }], 4);
  const update2 = buildPhishingUpdate([{ chunkNum: 2, urls: add2Urls }], 32);

  // We should only need to complete the partial hashes
  const completer = installCompleter("test-phish-simple", [[1, add1Urls]], []);

  doTest([update1, update2], {
    tableData: "test-phish-simple;a:1-2",
    // both urls should match...
    urlsExist: add1Urls.concat(add2Urls),
    // ... but the completer should only be queried for the partial entry
    completerQueried: [completer, add1Urls],
  });
}
+
// Same as testMixedSizesSameDomain, but the two adds live on different
// domains.
function testMixedSizesDifferentDomains() {
  const add1Urls = ["foo.com/a"];
  const add2Urls = ["bar.com/b"];

  const update1 = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }], 4);
  const update2 = buildPhishingUpdate([{ chunkNum: 2, urls: add2Urls }], 32);

  // We should only need to complete the partial hashes
  const completer = installCompleter("test-phish-simple", [[1, add1Urls]], []);

  doTest([update1, update2], {
    tableData: "test-phish-simple;a:1-2",
    // both urls should match...
    urlsExist: add1Urls.concat(add2Urls),
    // ... but the completer should only be queried for the partial entry
    completerQueried: [completer, add1Urls],
  });
}
+
// An update with an illegal hash size must fail and leave only the data from
// the preceding good update behind.
function testInvalidHashSize() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  // Only 4 and 32 are legal hash sizes; 12 must make the update error out.
  const badUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 12);

  const addUrls2 = ["zaz.com/a", "xyz.com/b"];
  const goodUpdate = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }], 4);

  installCompleter("test-phish-simple", [[1, addUrls]], []);

  const assertions = {
    tableData: "test-phish-simple;a:2",
    urlsDontExist: addUrls,
  };

  // A successful update will trigger an error
  doUpdateTest([goodUpdate, badUpdate], assertions, updateError, runNextTest);
}
+
// Completions reported for the wrong table are trusted and discarded, and the
// resulting miss is cached.
function testWrongTable() {
  const addUrls = ["foo.com/a"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter(
    "test-malware-simple", // wrong table
    [[1, addUrls]],
    []
  );

  // The above installCompleter installs the completer for test-malware-simple,
  // we want it to be used for test-phish-simple too.
  dbservice.setHashCompleter("test-phish-simple", completer);

  const assertions = {
    tableData: "test-phish-simple;a:1",
    // The urls were added as phishing urls, but the completer is claiming
    // that they are malware urls, and we trust the completer in this case.
    // The result will be discarded, so we can only check for non-existence.
    urlsDontExist: addUrls,
    // Make sure the completer was actually queried.
    completerQueried: [completer, addUrls],
  };

  const onUpdateSuccess = () => {
    // Give the dbservice a chance to (not) cache the result.
    do_timeout(3000, () => {
      // The miss earlier will have caused a miss to be cached.
      // Resetting the completer does not count as an update,
      // so we will not be probed again.
      const newCompleter = installCompleter(
        "test-malware-simple",
        [[1, addUrls]],
        []
      );
      dbservice.setHashCompleter("test-phish-simple", newCompleter);

      checkAssertions({ urlsDontExist: addUrls }, runNextTest);
    });
  };

  doUpdateTest([update], assertions, onUpdateSuccess, updateError);
}
+
// Prime the completion cache: add |addUrls| as partial hashes, look them up
// (which queries the completer and caches the completions), then — after a
// short delay so caching can finish — invoke |part2| with no arguments.
function setupCachedResults(addUrls, part2) {
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  const assertions = {
    tableData: "test-phish-simple;a:1",
    // Request the add url. This should cause the completion to be cached.
    urlsExist: addUrls,
    // Make sure the completer was actually queried.
    completerQueried: [completer, addUrls],
  };

  doUpdateTest(
    [update],
    assertions,
    // Give the dbservice a chance to cache the result.
    () => do_timeout(3000, part2),
    updateError
  );
}
+
// A cached completion must satisfy a repeated lookup without re-querying
// the completer.
function testCachedResults() {
  // Note: setupCachedResults() invokes this callback with no arguments;
  // the previous unused "add" parameter was removed as misleading.
  setupCachedResults(["foo.com/a"], function() {
    // This is called after setupCachedResults(). Verify that
    // checking the url again does not cause a completer request.

    // install a new completer, this one should never be queried.
    const newCompleter = installCompleter("test-phish-simple", [[1, []]], []);

    const assertions = {
      urlsExist: ["foo.com/a"],
      completerQueried: [newCompleter, []],
    };
    checkAssertions(assertions, runNextTest);
  });
}
+
// A sub chunk must evict the cached completion without querying the
// (fresh) completer.
function testCachedResultsWithSub() {
  setupCachedResults(["foo.com/a"], function() {
    // install a new completer, this one should never be queried.
    const newCompleter = installCompleter("test-phish-simple", [[1, []]], []);

    const removeUpdate = buildPhishingUpdate(
      [{ chunkNum: 2, chunkType: "s", urls: ["1:foo.com/a"] }],
      4
    );

    doTest([removeUpdate], {
      urlsDontExist: ["foo.com/a"],
      completerQueried: [newCompleter, []],
    });
  });
}
+
// Expiring the add chunk must also evict the cached completion, again
// without querying the fresh completer.
function testCachedResultsWithExpire() {
  setupCachedResults(["foo.com/a"], function() {
    // install a new completer, this one should never be queried.
    const newCompleter = installCompleter("test-phish-simple", [[1, []]], []);

    const expireUpdate = "n:1000\ni:test-phish-simple\nad:1\n";

    doTest([expireUpdate], {
      urlsDontExist: ["foo.com/a"],
      completerQueried: [newCompleter, []],
    });
  });
}
+
// An update failure must flush cached completions so the completer is
// queried again afterwards.
function testCachedResultsFailure() {
  const existUrls = ["foo.com/a"];
  setupCachedResults(existUrls, function() {
    // This is called after setupCachedResults(). Verify that
    // checking the url again does not cause a completer request.

    // install a new completer, this one should never be queried.
    const newCompleter = installCompleter("test-phish-simple", [[1, []]], []);

    const cachedAssertions = {
      urlsExist: existUrls,
      completerQueried: [newCompleter, []],
    };

    checkAssertions(cachedAssertions, function() {
      // Apply the update. The cached completes should be gone.
      doErrorUpdate(
        "test-phish-simple,test-malware-simple",
        function() {
          // Now the completer gets queried again.
          const newCompleter2 = installCompleter(
            "test-phish-simple",
            [[1, existUrls]],
            []
          );
          checkAssertions(
            {
              tableData: "test-phish-simple;a:1",
              urlsExist: existUrls,
              completerQueried: [newCompleter2, existUrls],
            },
            runNextTest
          );
        },
        updateError
      );
    });
  });
}
+
// An update failure marks the lists stale, so complete urls get re-verified
// through the completer.
function testErrorList() {
  const addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  // The update failure will kill the cached completes, so the above must be
  // added as prefixes to get any hit at all past the update failure.
  const update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);

  const completer = installCompleter("test-phish-simple", [[1, addUrls]], []);

  const assertions = {
    tableData: "test-phish-simple;a:1",
    urlsExist: addUrls,
    // These are complete urls, and will only be completed if the
    // list is stale.
    completerQueried: [completer, addUrls],
  };

  // Apply the update; the tables become fresh. Then fake an update failure
  // to mark them stale again before checking the assertions.
  doStreamUpdate(
    update,
    () =>
      doErrorUpdate(
        "test-phish-simple,test-malware-simple",
        () => checkAssertions(assertions, runNextTest),
        updateError
      ),
    updateError
  );
}
+
// Verify that different lists (test-phish-simple,
// test-malware-simple) maintain their freshness separately.
function testErrorListIndependent() {
  const phishUrls = ["phish.com/a"];
  const malwareUrls = ["attack.com/a"];
  // The phishing entries have to persist past the update failure, so they
  // are added as 4-byte prefixes; the malware ones are 32-byte completes.
  let update = buildPhishingUpdate([{ chunkNum: 1, urls: phishUrls }], 4);
  update += buildMalwareUpdate([{ chunkNum: 2, urls: malwareUrls }], 32);

  const completer = installCompleter("test-phish-simple", [[1, phishUrls]], []);

  const assertions = {
    tableData: "test-malware-simple;a:2\ntest-phish-simple;a:1",
    urlsExist: phishUrls,
    malwareUrlsExist: malwareUrls,
    // Only the phishing urls should be completed, because only the phishing
    // table will be stale.
    completerQueried: [completer, phishUrls],
  };

  // Apply the update, then fake an update failure for *just* the phishing
  // table so only that data is marked stale.
  doStreamUpdate(
    update,
    () =>
      doErrorUpdate(
        "test-phish-simple",
        () => checkAssertions(assertions, runNextTest),
        updateError
      ),
    updateError
  );
}
+
function run_test() {
  // Run all partial-hash tests in order; each one calls runNextTest itself.
  runTests([
    testPartialAdds,
    testPartialAddsWithConflicts,
    testFragments,
    testSpecFragments,
    testMoreSpecFragments,
    testFalsePositives,
    testEmptyCompleter,
    testCompleterFailure,
    testMixedSizesSameDomain,
    testMixedSizesDifferentDomains,
    testInvalidHashSize,
    testWrongTable,
    testCachedResults,
    testCachedResultsWithSub,
    testCachedResultsWithExpire,
    testCachedResultsFailure,
    testErrorList,
    testErrorListIndependent,
  ]);
}

do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js b/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
new file mode 100644
index 0000000000..499c9e478c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
@@ -0,0 +1,104 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+
+let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+);
+
function testMobileOnlyThreats() {
  // Mobile-only threat type(s):
  // - goog-harmful-proto (POTENTIALLY_HARMFUL_APPLICATION)

  // Compare a request that includes PHA against one that does not: on
  // Android the two must differ, elsewhere PHA must be filtered out.
  const checkPair = (requestWithPHA, requestNoPHA) => {
    if (AppConstants.platform === "android") {
      notEqual(
        requestWithPHA,
        requestNoPHA,
        "PHA (i.e. goog-harmful-proto) shouldn't be filtered on mobile platform."
      );
    } else {
      equal(
        requestWithPHA,
        requestNoPHA,
        "PHA (i.e. goog-harmful-proto) should be filtered on non-mobile platform."
      );
    }
  };

  // Update requests.
  checkPair(
    urlUtils.makeUpdateRequestV4(
      ["goog-phish-proto", "goog-harmful-proto"],
      ["AAAAAA", "AAAAAA"]
    ),
    urlUtils.makeUpdateRequestV4(["goog-phish-proto"], ["AAAAAA"])
  );

  // Full-hash requests.
  checkPair(
    urlUtils.makeFindFullHashRequestV4(
      ["goog-phish-proto", "goog-harmful-proto"],
      ["", ""], // state.
      [btoa("0123")] // prefix.
    ),
    urlUtils.makeFindFullHashRequestV4(
      ["goog-phish-proto"],
      [""], // state.
      [btoa("0123")] // prefix.
    )
  );
}
+
function testDesktopOnlyThreats() {
  // Desktop-only threats:
  // - goog-downloadwhite-proto (CSD_WHITELIST)
  // - goog-badbinurl-proto (MALICIOUS_BINARY)

  const withDesktopOnly = urlUtils.makeUpdateRequestV4(
    ["goog-phish-proto", "goog-downloadwhite-proto", "goog-badbinurl-proto"],
    ["", "", ""]
  );
  const withoutDesktopOnly = urlUtils.makeUpdateRequestV4(
    ["goog-phish-proto"],
    [""]
  );

  if (AppConstants.platform === "android") {
    equal(
      withDesktopOnly,
      withoutDesktopOnly,
      "Android shouldn't contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'."
    );
  } else {
    notEqual(
      withDesktopOnly,
      withoutDesktopOnly,
      "Desktop should contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'."
    );
  }
}
+
function run_test() {
  // Both checks are synchronous; run them back to back.
  testMobileOnlyThreats();
  testDesktopOnlyThreats();
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_pref.js b/toolkit/components/url-classifier/tests/unit/test_pref.js
new file mode 100644
index 0000000000..3a72eceb8e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_pref.js
@@ -0,0 +1,15 @@
function run_test() {
  const urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
    Ci.nsIUrlClassifierUtils
  );

  // Every provider — google, mozilla, or an unknown one — reports protocol
  // "2.2" until SB v4 is enabled by default.
  for (const provider of ["google", "mozilla", "unknown-provider"]) {
    equal(urlUtils.getProtocolVersion(provider), "2.2");
  }
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_prefixset.js b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
new file mode 100644
index 0000000000..2083ed43d4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
@@ -0,0 +1,178 @@
// newPset: returns an empty nsIUrlClassifierPrefixSet.
function newPset() {
  const pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
    Ci.nsIUrlClassifierPrefixSet
  );
  pset.init("all");
  return pset;
}
+
// arrContains: returns true if |arr| contains the element |target|. Uses binary
// search and requires |arr| to be sorted.
function arrContains(arr, target) {
  let lo = 0;
  let hi = arr.length - 1;

  while (lo <= hi) {
    const mid = lo + ((hi - lo) >> 1);
    const value = arr[mid];

    if (value === target) {
      return true;
    }
    if (value < target) {
      lo = mid + 1;
    } else {
      hi = mid - 1;
    }
  }

  return false;
}
+
// checkContents: Check whether the PrefixSet pset contains
// exactly the prefixes in the passed array (compared in sorted order).
function checkContents(pset, prefixes) {
  // Out-param for getPrefixes(); only the returned array is inspected here.
  const outcount = {};
  const outset = pset.getPrefixes(outcount);
  // Sort a copy so we do not mutate the caller's array as a side effect.
  const expected = [...prefixes].sort((x, y) => x - y);
  Assert.equal(expected.length, outset.length);
  for (let i = 0; i < expected.length; i++) {
    Assert.equal(expected[i], outset[i]);
  }
}
+
// Thin wrapper so every membership probe goes through a single place.
function wrappedProbe(pset, prefix) {
  const found = pset.contains(prefix);
  return found;
}
+
// doRandomLookups: we use this to test for false membership with random input
// over the range of prefixes (unsigned 32-bits integers).
// pset: a nsIUrlClassifierPrefixSet to test.
// prefixes: an array of prefixes supposed to make up the prefix set.
// N: number of random lookups to make.
function doRandomLookups(pset, prefixes, N) {
  for (let i = 0; i < N; i++) {
    // Draw random 32-bit values until we hit one that is NOT in |prefixes|.
    let candidate = prefixes[0];
    while (arrContains(prefixes, candidate)) {
      candidate = Math.floor(Math.random() * Math.pow(2, 32));
    }

    Assert.ok(!wrappedProbe(pset, candidate));
  }
}
+
// doExpectedLookups: we use this to test expected membership.
// pset: a nsIUrlClassifierPrefixSet to test.
// prefixes: values that must all be present; N: number of repetitions.
function doExpectedLookups(pset, prefixes, N) {
  for (let i = 0; i < N; i++) {
    for (const prefix of prefixes) {
      dump("Checking " + prefix + "\n");
      Assert.ok(wrappedProbe(pset, prefix));
    }
  }
}
+
// testBasicPset: A very basic test of the prefix set to make sure that it
// exists and to give a basic example of its use.
function testBasicPset() {
  const pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
    Ci.nsIUrlClassifierPrefixSet
  );
  const prefixes = [2, 50, 100, 2000, 78000, 1593203];
  pset.setPrefixes(prefixes, prefixes.length);

  for (const present of [100, 1593203]) {
    Assert.ok(wrappedProbe(pset, present));
  }
  for (const absent of [100000, 999, 0]) {
    Assert.ok(!wrappedProbe(pset, absent));
  }

  checkContents(pset, prefixes);
}
+
// Repeated input values must not confuse membership lookups.
function testDuplicates() {
  const pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
    Ci.nsIUrlClassifierPrefixSet
  );
  const prefixes = [1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 3, 5, 6, 6, 7, 7, 9, 9, 9];
  pset.setPrefixes(prefixes, prefixes.length);

  for (const present of [1, 2, 5, 9]) {
    Assert.ok(wrappedProbe(pset, present));
  }
  for (const absent of [4, 8]) {
    Assert.ok(!wrappedProbe(pset, absent));
  }

  checkContents(pset, prefixes);
}
+
// Exercise random non-member probes plus expected-member probes on a small set.
function testSimplePset() {
  const pset = newPset();
  const prefixes = [1, 2, 100, 400, 123456789];
  pset.setPrefixes(prefixes, prefixes.length);

  doRandomLookups(pset, prefixes, 100);
  doExpectedLookups(pset, prefixes, 1);

  checkContents(pset, prefixes);
}
+
// Loading a second prefix list must completely replace the first one.
function testReSetPrefixes() {
  const pset = newPset();
  const firstPrefixes = [1, 5, 100, 1000, 150000];
  pset.setPrefixes(firstPrefixes, firstPrefixes.length);

  doExpectedLookups(pset, firstPrefixes, 1);

  const secondPrefixes = [12, 50, 300, 2000, 5000, 200000];
  pset.setPrefixes(secondPrefixes, secondPrefixes.length);

  doExpectedLookups(pset, secondPrefixes, 1);
  for (const old of firstPrefixes) {
    Assert.ok(!wrappedProbe(pset, old));
  }

  checkContents(pset, secondPrefixes);
}
+
// Boundary sizes: a one-element set, then an empty set.
function testTinySet() {
  const pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
    Ci.nsIUrlClassifierPrefixSet
  );
  let prefixes = [1];
  pset.setPrefixes(prefixes, prefixes.length);

  Assert.ok(wrappedProbe(pset, 1));
  Assert.ok(!wrappedProbe(pset, 100000));
  checkContents(pset, prefixes);

  // Now reset to the empty set; the old member must be gone.
  prefixes = [];
  pset.setPrefixes(prefixes, prefixes.length);
  Assert.ok(!wrappedProbe(pset, 1));
  checkContents(pset, prefixes);
}
+
// The full suite, in execution order (all synchronous).
var tests = [
  testBasicPset,
  testSimplePset,
  testReSetPrefixes,
  testDuplicates,
  testTinySet,
];
+
function run_test() {
  // None of the tests use |executeSoon| or any sort of callbacks, so we can
  // just run them in succession.
  for (const test of tests) {
    dump("Running " + test.name + "\n");
    test();
  }
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_provider_url.js b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
new file mode 100644
index 0000000000..8229448a9c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
@@ -0,0 +1,32 @@
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+
// Point the fake app info at the given version string.
function updateVersion(version) {
  updateAppInfo({ version });
}
+
add_test(function test_provider_url() {
  const urls = [
    "browser.safebrowsing.provider.google.updateURL",
    "browser.safebrowsing.provider.google.gethashURL",
    "browser.safebrowsing.provider.mozilla.updateURL",
    "browser.safebrowsing.provider.mozilla.gethashURL",
  ];

  // FIXME: Most of these only worked in the past because calling
  // `updateAppInfo` did not actually replace `Services.appinfo`, which
  // the URL formatter uses.
  // let versions = ["49.0", "49.0.1", "49.0a1", "49.0b1", "49.0esr", "49.0.1esr"];
  const versions = ["49.0", "49.0.1"];

  for (const version of versions) {
    for (const url of urls) {
      updateVersion(version);
      const formatted = Services.urlFormatter.formatURLPref(url);
      // Every formatted provider URL must carry the major appver.
      Assert.notEqual(formatted.indexOf("&appver=49.0&"), -1);
    }
  }

  run_next_test();
});
diff --git a/toolkit/components/url-classifier/tests/unit/test_rsListService.js b/toolkit/components/url-classifier/tests/unit/test_rsListService.js
new file mode 100644
index 0000000000..1133241e5e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_rsListService.js
@@ -0,0 +1,370 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIUrlClassifierRemoteSettingsService implementation. */
+
+const { RemoteSettings } = ChromeUtils.import(
+ "resource://services-settings/remote-settings.js"
+);
+const { SBRS_UPDATE_MINIMUM_DELAY } = ChromeUtils.import(
+ "resource://gre/modules/UrlClassifierRemoteSettingsService.jsm"
+);
+
+const COLLECTION_NAME = "tracking-protection-lists";
+
+const REMOTE_SETTINGS_DATA = [
+ {
+ Name: "content-fingerprinting-track-digest256",
+ attachment: {
+ hash: "96a4a850a1a475001148fa8a3a5efea58951f7176d3624ad7614fbf32732ee48",
+ size: 948,
+ filename: "content-fingerprinting-track-digest256",
+ location:
+ "main-workspace/tracking-protection-lists/content-fingerprinting-track-digest256",
+ mimetype: "text/plain",
+ },
+ id: "content-fingerprinting-track-digest256",
+ Version: 1597417364,
+ },
+ {
+ Name: "mozplugin-block-digest256",
+ attachment: {
+ hash: "dd2b800c7e4bad17e1c79f3e530c0b94e0a039adf4566f30bc3c285a547fa4fc",
+ size: 3029,
+ filename: "mozplugin-block-digest256",
+ location:
+ "main-workspace/tracking-protection-lists/mozplugin-block-digest256",
+ mimetype: "text/plain",
+ },
+ id: "mozplugin-block-digest256",
+ Version: 1575583456,
+ },
+  // Entry whose attachment does not exist on the server
+ {
+ Name: "social-track-digest256",
+ attachment: {
+ location: "main-workspace/tracking-protection-lists/not-exist",
+ },
+ id: "social-track-digest256",
+ Version: 1111111111,
+ },
+ // Entry with corrupted attachment
+ {
+ Name: "analytic-track-digest256",
+ attachment: {
+ hash: "644a0662bcf7313570ee68490e3805f5cc7a0503c097f040525c28dc5bfe4c97",
+ size: 58,
+ filename: "invalid.chunk",
+ location: "main-workspace/tracking-protection-lists/invalid.chunk",
+ mimetype: "text/plain",
+ },
+ id: "analytic-track-digest256",
+ Version: 1111111111,
+ },
+];
+
+let gListService = Cc["@mozilla.org/url-classifier/list-service;1"].getService(
+ Ci.nsIUrlClassifierRemoteSettingsService
+);
+let gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+
+class UpdateEvent extends EventTarget {}
+function waitForEvent(element, eventName) {
+ return new Promise(function(resolve) {
+ element.addEventListener(eventName, e => resolve(e.detail), { once: true });
+ });
+}
+
+function buildPayload(tables) {
+ let payload = ``;
+ for (let table of tables) {
+ payload += table[0];
+ if (table[1] != null) {
+ payload += `;a:${table[1]}`;
+ }
+ payload += `\n`;
+ }
+ return payload;
+}
+
+let server;
+add_task(async function init() {
+ Services.prefs.setCharPref(
+ "browser.safebrowsing.provider.mozilla.updateURL",
+ `moz-sbrs://tracking-protection-list`
+ );
+ // Setup HTTP server for remote setting
+ server = new HttpServer();
+ server.start(-1);
+ registerCleanupFunction(() => server.stop(() => {}));
+
+ server.registerDirectory(
+ "/cdn/main-workspace/tracking-protection-lists/",
+ do_get_file("data")
+ );
+
+ server.registerPathHandler("/v1/", (request, response) => {
+ response.write(
+ JSON.stringify({
+ capabilities: {
+ attachments: {
+ base_url: `http://localhost:${server.identity.primaryPort}/cdn/`,
+ },
+ },
+ })
+ );
+ response.setHeader("Content-Type", "application/json; charset=UTF-8");
+ response.setStatusLine(null, 200, "OK");
+ });
+
+ Services.prefs.setCharPref(
+ "services.settings.server",
+ `http://localhost:${server.identity.primaryPort}/v1`
+ );
+
+ // Setup remote setting initial data
+ let db = await RemoteSettings(COLLECTION_NAME).db;
+ await db.importChanges({}, 42, REMOTE_SETTINGS_DATA);
+
+ registerCleanupFunction(() => {
+ Services.prefs.clearUserPref(
+ "browser.safebrowsing.provider.mozilla.updateURL"
+ );
+ Services.prefs.clearUserPref("services.settings.server");
+ });
+});
+
+// Test updates from RemoteSettings when there is no local data
+add_task(async function test_empty_update() {
+ let updateEvent = new UpdateEvent();
+ let promise = waitForEvent(updateEvent, "update");
+
+ const TEST_TABLES = [
+ ["mozplugin-block-digest256", null], // empty
+ ["content-fingerprinting-track-digest256", null], // empty
+ ];
+
+ gListService.fetchList(buildPayload(TEST_TABLES), {
+ // nsIStreamListener observer
+ onStartRequest(request) {},
+ onDataAvailable(aRequest, aStream, aOffset, aCount) {
+ let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+ Ci.nsIScriptableInputStream
+ );
+ stream.init(aStream);
+ let event = new CustomEvent("update", {
+ detail: stream.readBytes(aCount),
+ });
+ updateEvent.dispatchEvent(event);
+ },
+ onStopRequest(request, status) {},
+ });
+
+ let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+ for (const table of TEST_TABLES) {
+ expected += `i:${table[0]}\n` + readFileToString(`data/${table[0]}`);
+ }
+
+ Assert.equal(
+ await promise,
+ expected,
+ "Receive expected data from onDataAvailable"
+ );
+ gListService.clear();
+});
+
+// Test updates from RemoteSettings when we have an empty table,
+// a table with an older version, and a table which is up-to-date.
+add_task(async function test_update() {
+ let updateEvent = new UpdateEvent();
+ let promise = waitForEvent(updateEvent, "update");
+
+ const TEST_TABLES = [
+ ["mozplugin-block-digest256", 1575583456], // up-to-date
+ ["content-fingerprinting-track-digest256", 1575583456 - 1], // older version
+ ];
+
+ gListService.fetchList(buildPayload(TEST_TABLES), {
+    // nsIStreamListener observer receiving the streamed update
+    // payload produced by the list service.
+ onStartRequest(request) {},
+ onDataAvailable(aRequest, aStream, aOffset, aCount) {
+ let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+ Ci.nsIScriptableInputStream
+ );
+ stream.init(aStream);
+ let event = new CustomEvent("update", {
+ detail: stream.readBytes(aCount),
+ });
+ updateEvent.dispatchEvent(event);
+ },
+ onStopRequest(request, status) {},
+ });
+
+ // Build request with no version
+ let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+ for (const table of TEST_TABLES) {
+ if (["content-fingerprinting-track-digest256"].includes(table[0])) {
+ expected += `i:${table[0]}\n` + readFileToString(`data/${table[0]}`);
+ }
+ }
+
+ Assert.equal(
+ await promise,
+ expected,
+ "Receive expected data from onDataAvailable"
+ );
+ gListService.clear();
+});
+
+// Test updates from RemoteSettings service when all tables are up-to-date.
+add_task(async function test_no_update() {
+ let updateEvent = new UpdateEvent();
+ let promise = waitForEvent(updateEvent, "update");
+
+ const TEST_TABLES = [
+ ["mozplugin-block-digest256", 1575583456], // up-to-date
+ ["content-fingerprinting-track-digest256", 1597417364], // up-to-date
+ ];
+
+ gListService.fetchList(buildPayload(TEST_TABLES), {
+ // nsIStreamListener observer
+ onStartRequest(request) {},
+ onDataAvailable(aRequest, aStream, aOffset, aCount) {
+ let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+ Ci.nsIScriptableInputStream
+ );
+ stream.init(aStream);
+ let event = new CustomEvent("update", {
+ detail: stream.readBytes(aCount),
+ });
+ updateEvent.dispatchEvent(event);
+ },
+ onStopRequest(request, status) {},
+ });
+
+ // No data is expected
+ let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+
+ Assert.equal(
+ await promise,
+ expected,
+ "Receive expected data from onDataAvailable"
+ );
+ gListService.clear();
+});
+
+add_test(function test_update() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccess(aEvent) {
+ Assert.equal(SBRS_UPDATE_MINIMUM_DELAY, aEvent);
+ info("All data processed");
+ run_next_test();
+ }
+ // Just throw if we ever get an update or download error.
+ function handleError(aEvent) {
+ do_throw("We didn't download or update correctly: " + aEvent);
+ }
+
+ streamUpdater.downloadUpdates(
+ "content-fingerprinting-track-digest256",
+ "content-fingerprinting-track-digest256;\n",
+ true,
+ "moz-sbrs://remote-setting",
+ updateSuccess,
+ handleError,
+ handleError
+ );
+});
+
+add_test(function test_url_not_denylisted() {
+ let uri = Services.io.newURI("http://example.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "content-fingerprinting-track-digest256",
+ function handleEvent(aEvent) {
+ // This URI is not on any lists.
+ Assert.equal("", aEvent);
+ run_next_test();
+ }
+ );
+});
+
+add_test(function test_url_denylisted() {
+ let uri = Services.io.newURI("https://www.foresee.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "content-fingerprinting-track-digest256",
+ function handleEvent(aEvent) {
+ Assert.equal("content-fingerprinting-track-digest256", aEvent);
+ run_next_test();
+ }
+ );
+});
+
+add_test(function test_update_download_error() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccessOrError(aEvent) {
+    do_throw("Should be download error");
+ }
+  // A download error is the expected outcome for the missing attachment.
+ function downloadError(aEvent) {
+ run_next_test();
+ }
+
+ streamUpdater.downloadUpdates(
+ "social-track-digest256",
+ "social-track-digest256;\n",
+ true,
+ "moz-sbrs://remote-setting",
+ updateSuccessOrError,
+ updateSuccessOrError,
+ downloadError
+ );
+});
+
+add_test(function test_update_update_error() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccessOrDownloadError(aEvent) {
+ do_throw("Should be update error");
+ }
+  // An update error is the expected outcome for the corrupted attachment.
+ function updateError(aEvent) {
+ run_next_test();
+ }
+
+ streamUpdater.downloadUpdates(
+ "analytic-track-digest256",
+ "analytic-track-digest256;\n",
+ true,
+ "moz-sbrs://remote-setting",
+ updateSuccessOrDownloadError,
+ updateError,
+ updateSuccessOrDownloadError
+ );
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
new file mode 100644
index 0000000000..73426751cb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
@@ -0,0 +1,29 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+ );
+
+ // No list at all.
+ let requestNoList = urlUtils.makeUpdateRequestV4([], []);
+
+ // Only one valid list name.
+ let requestOneValid = urlUtils.makeUpdateRequestV4(
+ ["goog-phish-proto"],
+ ["AAAAAA"]
+ );
+
+ // Only one invalid list name.
+ let requestOneInvalid = urlUtils.makeUpdateRequestV4(
+ ["bad-list-name"],
+ ["AAAAAA"]
+ );
+
+ // One valid and one invalid list name.
+ let requestOneInvalidOneValid = urlUtils.makeUpdateRequestV4(
+ ["goog-phish-proto", "bad-list-name"],
+ ["AAAAAA", "AAAAAA"]
+ );
+
+ equal(requestNoList, requestOneInvalid);
+ equal(requestOneValid, requestOneInvalidOneValid);
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js b/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js
new file mode 100644
index 0000000000..dce00ed0ca
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js
@@ -0,0 +1,164 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+*/
+
+"use strict";
+
+const { NetUtil } = ChromeUtils.import("resource://gre/modules/NetUtil.jsm");
+const { UrlClassifierTestUtils } = ChromeUtils.import(
+ "resource://testing-common/UrlClassifierTestUtils.jsm"
+);
+
+const defaultTopWindowURI = NetUtil.newURI("http://www.example.com/");
+
+var httpServer;
+var trackingOrigin;
+
+// ShouldClassify algorithm uses the following parameters:
+// 1. Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER load flags
+// 2. Content type
+// 3. triggering principal
+// 4. beConservative flag
+// We test all the combinations here to make sure the algorithm is correct.
+
+// const PARAM_LOAD_BYPASS_URL_CLASSIFIER = 1 << 0;
+const PARAM_CONTENT_POLICY_TYPE_DOCUMENT = 1 << 1;
+const PARAM_TRIGGERING_PRINCIPAL_SYSTEM = 1 << 2;
+const PARAM_CAP_BE_CONSERVATIVE = 1 << 3;
+const PARAM_MAX = 1 << 4;
+
+function getParameters(bitFlags) {
+ var params = {
+ loadFlags: Ci.nsIRequest.LOAD_NORMAL,
+ contentType: Ci.nsIContentPolicy.TYPE_OTHER,
+ system: false,
+ beConservative: false,
+ };
+
+ if (bitFlags & PARAM_TRIGGERING_PRINCIPAL_SYSTEM) {
+ params.loadFlags = Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER;
+ }
+
+ if (bitFlags & PARAM_CONTENT_POLICY_TYPE_DOCUMENT) {
+ params.contentType = Ci.nsIContentPolicy.TYPE_DOCUMENT;
+ }
+
+ if (bitFlags & PARAM_TRIGGERING_PRINCIPAL_SYSTEM) {
+ params.system = true;
+ }
+
+ if (bitFlags & PARAM_CAP_BE_CONSERVATIVE) {
+ params.beConservative = true;
+ }
+
+ return params;
+}
+
+function getExpectedResult(params) {
+ if (params.loadFlags & Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER) {
+ return false;
+ }
+ if (params.beConservative) {
+ return false;
+ }
+ if (
+ params.system &&
+ params.contentType != Ci.nsIContentPolicy.TYPE_DOCUMENT
+ ) {
+ return false;
+ }
+
+ return true;
+}
+
+function setupHttpServer() {
+ httpServer = new HttpServer();
+ httpServer.start(-1);
+ httpServer.identity.setPrimary(
+ "http",
+ "tracking.example.org",
+ httpServer.identity.primaryPort
+ );
+ httpServer.identity.add(
+ "http",
+ "example.org",
+ httpServer.identity.primaryPort
+ );
+ trackingOrigin =
+ "http://tracking.example.org:" + httpServer.identity.primaryPort;
+}
+
+function setupChannel(params) {
+ var channel;
+
+ if (params.system) {
+ channel = NetUtil.newChannel({
+ uri: trackingOrigin + "/evil.js",
+ loadUsingSystemPrincipal: true,
+ contentPolicyType: params.contentType,
+ });
+ } else {
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ NetUtil.newURI(trackingOrigin),
+ {}
+ );
+ channel = NetUtil.newChannel({
+ uri: trackingOrigin + "/evil.js",
+ loadingPrincipal: principal,
+ securityFlags: Ci.nsILoadInfo.SEC_ALLOW_CROSS_ORIGIN_SEC_CONTEXT_IS_NULL,
+ contentPolicyType: params.contentType,
+ });
+ }
+
+ channel.QueryInterface(Ci.nsIHttpChannel);
+ channel.requestMethod = "GET";
+ channel.loadFlags |= params.loadFlags;
+ channel
+ .QueryInterface(Ci.nsIHttpChannelInternal)
+ .setTopWindowURIIfUnknown(defaultTopWindowURI);
+ channel.QueryInterface(Ci.nsIHttpChannelInternal).beConservative =
+ params.beConservative;
+
+ return channel;
+}
+
+add_task(async function testShouldClassify() {
+ Services.prefs.setBoolPref(
+ "privacy.trackingprotection.annotate_channels",
+ true
+ );
+ Services.prefs.setBoolPref("network.dns.native-is-localhost", true);
+
+ setupHttpServer();
+
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ for (let i = 0; i < PARAM_MAX; i++) {
+ let params = getParameters(i);
+ let channel = setupChannel(params);
+
+ await new Promise(resolve => {
+ channel.asyncOpen({
+ onStartRequest: (request, context) => {
+ Assert.equal(
+ !!(
+ request.QueryInterface(Ci.nsIClassifiedChannel)
+ .classificationFlags &
+ Ci.nsIClassifiedChannel.CLASSIFIED_ANY_BASIC_TRACKING
+ ),
+ getExpectedResult(params)
+ );
+ request.cancel(Cr.NS_ERROR_ABORT);
+ resolve();
+ },
+
+ onDataAvailable: (request, context, stream, offset, count) => {},
+ onStopRequest: (request, context, status) => {},
+ });
+ });
+ }
+
+ UrlClassifierTestUtils.cleanupTestTrackers();
+
+ httpServer.stop(do_test_finished);
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_streamupdater.js b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
new file mode 100644
index 0000000000..1a2ee847d1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
@@ -0,0 +1,244 @@
+function doTest(updates, assertions, expectError) {
+ if (expectError) {
+ doUpdateTest(updates, assertions, updateError, runNextTest);
+ } else {
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+ }
+}
+
+// Never use the same URLs for multiple tests, because we aren't guaranteed
+// to reset the database between tests.
+function testFillDb() {
+ var add1Urls = ["zaz.com/a", "yxz.com/c"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: add1Urls,
+ };
+
+ doTest([update], assertions, false);
+}
+
+function testSimpleForward() {
+ var add1Urls = ["foo-simple.com/a", "bar-simple.com/c"];
+ var add2Urls = ["foo-simple.com/b"];
+ var add3Urls = ["bar-simple.com/d"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-3",
+ urlsExist: add1Urls.concat(add2Urls).concat(add3Urls),
+ };
+
+ doTest([update], assertions, false);
+}
+
+// Make sure that a nested forward (a forward within a forward) causes
+// the update to fail.
+function testNestedForward() {
+ var add1Urls = ["foo-nested.com/a", "bar-nested.com/c"];
+ var add2Urls = ["foo-nested.com/b"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate([{ chunkNum: 2 }]);
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add1Urls }]);
+
+ update2 += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ var assertions = {
+ tableData: "",
+ urlsDontExist: add1Urls.concat(add2Urls),
+ };
+
+ doTest([update], assertions, true);
+}
+
+// An invalid URL forward causes the update to fail.
+function testInvalidUrlForward() {
+ var add1Urls = ["foo-invalid.com/a", "bar-invalid.com/c"];
+
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:asdf://blah/blah\n"; // invalid URL scheme
+
+ // add1Urls is present, but that is an artifact of the way we do the test.
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: add1Urls,
+ };
+
+ doTest([update], assertions, true);
+}
+
+// A failed network request causes the update to fail.
+function testErrorUrlForward() {
+ var add1Urls = ["foo-forward.com/a", "bar-forward.com/c"];
+
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }]);
+  update += "u:http://test.invalid/asdf/asdf\n"; // host that fails to resolve
+
+ // add1Urls is present, but that is an artifact of the way we do the test.
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: add1Urls,
+ };
+
+ doTest([update], assertions, true);
+}
+
+function testMultipleTables() {
+ var add1Urls = ["foo-multiple.com/a", "bar-multiple.com/c"];
+ var add2Urls = ["foo-multiple.com/b"];
+ var add3Urls = ["bar-multiple.com/d"];
+ var add4Urls = ["bar-multiple.com/e"];
+ var add6Urls = ["bar-multiple.com/g"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-malware-simple\n";
+
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update4 = buildBareUpdate([{ chunkNum: 4, urls: add4Urls }]);
+ update += "u:data:," + encodeURIComponent(update4) + "\n";
+
+ update += "i:test-block-simple\n";
+ var update6 = buildBareUpdate([{ chunkNum: 6, urls: add6Urls }]);
+ update += "u:data:," + encodeURIComponent(update6) + "\n";
+
+ var assertions = {
+ tableData:
+ "test-block-simple;a:6\ntest-malware-simple;a:3\ntest-phish-simple;a:1-2\ntest-unwanted-simple;a:4",
+ urlsExist: add1Urls.concat(add2Urls),
+ malwareUrlsExist: add3Urls,
+ unwantedUrlsExist: add4Urls,
+ blockedUrlsExist: add6Urls,
+ };
+
+ doTest([update], assertions, false);
+}
+
+function testUrlInMultipleTables() {
+ var add1Urls = ["foo-forward.com/a"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ update += "i:test-malware-simple\n";
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ tableData:
+ "test-malware-simple;a:2\ntest-phish-simple;a:1\ntest-unwanted-simple;a:3",
+ urlExistInMultipleTables: {
+ url: add1Urls,
+ tables: "test-malware-simple,test-phish-simple,test-unwanted-simple",
+ },
+ };
+
+ doTest([update], assertions, false);
+}
+
+function Observer(callback) {
+ this.observe = callback;
+}
+
+Observer.prototype = {
+ QueryInterface: ChromeUtils.generateQI(["nsIObserver"]),
+};
+
+// Tests a database reset request.
+function testReset() {
+ // The moz-phish-simple table is populated separately from the other update in
+ // a separate update request. Therefore it should not be reset when we run the
+ // updates later in this function.
+ var mozAddUrls = ["moz-reset.com/a"];
+ var mozUpdate = buildMozPhishingUpdate([{ chunkNum: 1, urls: mozAddUrls }]);
+
+ var dataUpdate = "data:," + encodeURIComponent(mozUpdate);
+
+ streamUpdater.downloadUpdates(
+ mozTables,
+ "",
+ true,
+ dataUpdate,
+ () => {},
+ updateError,
+ updateError
+ );
+
+ var addUrls1 = ["foo-reset.com/a", "foo-reset.com/b"];
+ var update1 = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls1 }]);
+
+ var update2 = "n:1000\nr:pleasereset\n";
+
+ var addUrls3 = ["bar-reset.com/a", "bar-reset.com/b"];
+ var update3 = buildPhishingUpdate([{ chunkNum: 3, urls: addUrls3 }]);
+
+ var assertions = {
+ tableData: "moz-phish-simple;a:1\ntest-phish-simple;a:3", // tables that should still be there.
+ mozPhishingUrlsExist: mozAddUrls, // mozAddUrls added prior to the reset
+ // but it should still exist after reset.
+ urlsExist: addUrls3, // addUrls3 added after the reset.
+ urlsDontExist: addUrls1, // addUrls1 added prior to the reset
+ };
+
+ // Use these update responses in order. The update request only
+ // contains test-*-simple tables so the reset will only apply to these.
+ doTest([update1, update2, update3], assertions, false);
+}
+
+function run_test() {
+ runTests([
+ testFillDb,
+ testSimpleForward,
+ testNestedForward,
+ testInvalidUrlForward,
+ testErrorUrlForward,
+ testMultipleTables,
+ testUrlInMultipleTables,
+ testReset,
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
new file mode 100644
index 0000000000..98ec79d345
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
@@ -0,0 +1,55 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+ );
+
+ // Test list name to threat type conversion.
+
+ equal(urlUtils.convertListNameToThreatType("goog-malware-proto"), 1);
+ equal(urlUtils.convertListNameToThreatType("googpub-phish-proto"), 2);
+ equal(urlUtils.convertListNameToThreatType("goog-unwanted-proto"), 3);
+ equal(urlUtils.convertListNameToThreatType("goog-harmful-proto"), 4);
+ equal(urlUtils.convertListNameToThreatType("goog-phish-proto"), 5);
+ equal(urlUtils.convertListNameToThreatType("goog-badbinurl-proto"), 7);
+ equal(urlUtils.convertListNameToThreatType("goog-passwordwhite-proto"), 8);
+ equal(urlUtils.convertListNameToThreatType("goog-downloadwhite-proto"), 9);
+
+ try {
+ urlUtils.convertListNameToThreatType("bad-list-name");
+ ok(false, "Bad list name should lead to exception.");
+ } catch (e) {}
+
+ try {
+ urlUtils.convertListNameToThreatType("bad-list-name");
+ ok(false, "Bad list name should lead to exception.");
+ } catch (e) {}
+
+ // Test threat type to list name conversion.
+ equal(urlUtils.convertThreatTypeToListNames(1), "goog-malware-proto");
+ equal(
+ urlUtils.convertThreatTypeToListNames(2),
+ "googpub-phish-proto,moztest-phish-proto,test-phish-proto"
+ );
+ equal(
+ urlUtils.convertThreatTypeToListNames(3),
+ "goog-unwanted-proto,moztest-unwanted-proto,test-unwanted-proto"
+ );
+ equal(urlUtils.convertThreatTypeToListNames(4), "goog-harmful-proto");
+ equal(urlUtils.convertThreatTypeToListNames(5), "goog-phish-proto");
+ equal(urlUtils.convertThreatTypeToListNames(7), "goog-badbinurl-proto");
+ equal(
+ urlUtils.convertThreatTypeToListNames(8),
+ "goog-passwordwhite-proto,moztest-passwordwhite-proto,test-passwordwhite-proto"
+ );
+ equal(urlUtils.convertThreatTypeToListNames(9), "goog-downloadwhite-proto");
+
+ try {
+ urlUtils.convertThreatTypeToListNames(0);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+
+ try {
+ urlUtils.convertThreatTypeToListNames(100);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+}
diff --git a/toolkit/components/url-classifier/tests/unit/xpcshell.ini b/toolkit/components/url-classifier/tests/unit/xpcshell.ini
new file mode 100644
index 0000000000..af77f72a7e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/xpcshell.ini
@@ -0,0 +1,37 @@
+[DEFAULT]
+head = head_urlclassifier.js
+tags = condprof
+skip-if = toolkit == 'android'
+support-files =
+ data/**
+
+[test_addsub.js]
+[test_bug1274685_unowned_list.js]
+[test_backoff.js]
+[test_canonicalization.js]
+[test_channelClassifierService.js]
+[test_dbservice.js]
+skip-if = condprof # Bug 1769828
+[test_hashcompleter.js]
+[test_hashcompleter_v4.js]
+# Bug 752243: Profile cleanup frequently fails
+#skip-if = os == "mac" || os == "linux"
+[test_partial.js]
+[test_prefixset.js]
+[test_threat_type_conversion.js]
+[test_provider_url.js]
+[test_exceptionListService.js]
+tags = remote-settings
+[test_streamupdater.js]
+[test_digest256.js]
+run-sequentially = very high failure rate in parallel
+[test_listmanager.js]
+run-sequentially = very high failure rate in parallel
+[test_pref.js]
+[test_malwaretable_pref.js]
+[test_safebrowsing_protobuf.js]
+[test_platform_specific_threats.js]
+[test_features.js]
+[test_shouldclassify.js]
+[test_rsListService.js]
+tags = remote-settings