summaryrefslogtreecommitdiffstats
path: root/toolkit/components/url-classifier/tests
diff options
context:
space:
mode:
Diffstat (limited to 'toolkit/components/url-classifier/tests')
-rw-r--r--toolkit/components/url-classifier/tests/UrlClassifierTestUtils.sys.mjs307
-rw-r--r--toolkit/components/url-classifier/tests/browser/browser.toml7
-rw-r--r--toolkit/components/url-classifier/tests/browser/browser_emailtracking_telemetry.js423
-rw-r--r--toolkit/components/url-classifier/tests/browser/page.html8
-rw-r--r--toolkit/components/url-classifier/tests/browser/raptor.jpgbin0 -> 49629 bytes
-rw-r--r--toolkit/components/url-classifier/tests/gtest/Common.cpp219
-rw-r--r--toolkit/components/url-classifier/tests/gtest/Common.h147
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestCaching.cpp271
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp281
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestClassifier.cpp83
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp109
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp216
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp152
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp127
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestPrefixSet.cpp87
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp140
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp173
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp30
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp58
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestTable.cpp59
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestURLsAndHashing.cpp71
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp785
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp254
-rw-r--r--toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp486
-rw-r--r--toolkit/components/url-classifier/tests/gtest/moz.build40
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html143
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/bad.css1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/bad.css^headers^1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/basic.vtt27
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/bug_1281083.html11
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/bug_1580416.html13
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/cache.sjs90
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/chrome.toml83
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html154
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html26
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/classifierCommon.js108
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/classifierFrame.html57
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/classifierHelper.js204
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/cleanWorker.js12
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/dnt.html31
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/dnt.sjs9
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/evil.css1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/evil.css^headers^1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/evil.js3
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/evil.js^headers^2
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/evilWorker.js5
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/features.js291
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/gethash.sjs86
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/gethashFrame.html61
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/good.js3
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/head.js40
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/import.css3
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/import2.css3
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/mochitest.toml91
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/ping.sjs15
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/raptor.jpgbin0 -> 49629 bytes
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/redirect_tracker.sjs7
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/report.sjs71
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/sandboxed.html8
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/sandboxed.html^headers^1
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/seek.webmbin0 -> 215529 bytes
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/sw_register.html33
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/sw_unregister.html12
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/sw_worker.js10
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_advisory_link.html138
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html60
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_annotation_vs_TP.html31
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html265
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_cachemiss.html167
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html171
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classifier.html141
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html157
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref_bug1395411.html72
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classifier_match.html179
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html73
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classify_by_default.html156
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html131
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classify_top_sandboxed.html74
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_classify_track.html163
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_cryptomining.html100
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_cryptomining_annotate.html31
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_donottrack.html141
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_emailtracking.html130
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_fingerprinting.html130
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_fingerprinting_annotate.html31
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_gethash.html118
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html153
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_reporturl.html217
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html91
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_socialtracking.html109
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_socialtracking_annotate.html32
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_threathit_report.html235
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html100
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1312515.html164
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1580416.html102
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html165
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/threathit.sjs47
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/track.html7
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/trackerFrame.html91
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs80
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/trackingRequest.html21
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/trackingRequest.js9
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/trackingRequest.js^headers^2
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js5
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/update.sjs71
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/vp9.webmbin0 -> 97465 bytes
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html15
-rw-r--r--toolkit/components/url-classifier/tests/mochitest/workerFrame.html65
-rw-r--r--toolkit/components/url-classifier/tests/moz.build17
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256bin0 -> 948 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/digest1.chunkbin0 -> 939 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/digest2.chunk2
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/google-trackwhite-digest256bin0 -> 1470328 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/invalid.chunk2
-rw-r--r--toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256bin0 -> 3029 bytes
-rw-r--r--toolkit/components/url-classifier/tests/unit/head_urlclassifier.js570
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_addsub.js329
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_backoff.js92
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js65
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_canonicalization.js83
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js225
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_dbservice.js329
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_digest256.js143
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_exceptionListService.js285
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_features.js81
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_hashcompleter.js438
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js292
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_listmanager.js355
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js4
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_partial.js607
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js104
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_pref.js15
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_prefixset.js178
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_provider_url.js32
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_rsListService.js424
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js29
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_shouldclassify.js166
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_streamupdater.js244
-rw-r--r--toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js50
-rw-r--r--toolkit/components/url-classifier/tests/unit/xpcshell.toml56
141 files changed, 15908 insertions, 0 deletions
diff --git a/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.sys.mjs b/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.sys.mjs
new file mode 100644
index 0000000000..c69d0c24b4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/UrlClassifierTestUtils.sys.mjs
@@ -0,0 +1,307 @@
+const ANNOTATION_TABLE_NAME = "mochitest1-track-simple";
+const ANNOTATION_TABLE_PREF = "urlclassifier.trackingAnnotationTable";
+const ANNOTATION_ENTITYLIST_TABLE_NAME = "mochitest1-trackwhite-simple";
+const ANNOTATION_ENTITYLIST_TABLE_PREF =
+ "urlclassifier.trackingAnnotationWhitelistTable";
+
+const TRACKING_TABLE_NAME = "mochitest2-track-simple";
+const TRACKING_TABLE_PREF = "urlclassifier.trackingTable";
+const ENTITYLIST_TABLE_NAME = "mochitest2-trackwhite-simple";
+const ENTITYLIST_TABLE_PREF = "urlclassifier.trackingWhitelistTable";
+
+const SOCIAL_ANNOTATION_TABLE_NAME = "mochitest3-track-simple";
+const SOCIAL_ANNOTATION_TABLE_PREF =
+ "urlclassifier.features.socialtracking.annotate.blacklistTables";
+const SOCIAL_TRACKING_TABLE_NAME = "mochitest4-track-simple";
+const SOCIAL_TRACKING_TABLE_PREF =
+ "urlclassifier.features.socialtracking.blacklistTables";
+const EMAIL_TRACKING_TABLE_NAME = "mochitest5-track-simple";
+const EMAIL_TRACKING_TABLE_PREF =
+ "urlclassifier.features.emailtracking.blocklistTables";
+
+let timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+
+export var UrlClassifierTestUtils = {
+ addTestTrackers() {
+ // Add some URLs to the tracking databases
+ let annotationURL1 = "tracking.example.org/"; // only for annotations
+ let annotationURL2 = "itisatracker.org/";
+ let annotationURL3 = "trackertest.org/";
+ let annotationURL4 = "another-tracking.example.net/";
+ let annotationURL5 = "tlsresumptiontest.example.org/";
+ let annotationEntitylistedURL = "itisatrap.org/?resource=example.org";
+ let trackingURL1 = "tracking.example.com/"; // only for TP
+ let trackingURL2 = "itisatracker.org/";
+ let trackingURL3 = "trackertest.org/";
+ let entitylistedURL = "itisatrap.org/?resource=itisatracker.org";
+ let socialTrackingURL = "social-tracking.example.org/";
+ let emailTrackingURL = "email-tracking.example.org/";
+
+ let annotationUpdate =
+ "n:1000\ni:" +
+ ANNOTATION_TABLE_NAME +
+ "\nad:5\n" +
+ "a:1:32:" +
+ annotationURL1.length +
+ "\n" +
+ annotationURL1 +
+ "\n" +
+ "a:2:32:" +
+ annotationURL2.length +
+ "\n" +
+ annotationURL2 +
+ "\n" +
+ "a:3:32:" +
+ annotationURL3.length +
+ "\n" +
+ annotationURL3 +
+ "\n" +
+ "a:4:32:" +
+ annotationURL4.length +
+ "\n" +
+ annotationURL4 +
+ "\n" +
+ "a:5:32:" +
+ annotationURL5.length +
+ "\n" +
+ annotationURL5 +
+ "\n";
+ let socialAnnotationUpdate =
+ "n:1000\ni:" +
+ SOCIAL_ANNOTATION_TABLE_NAME +
+ "\nad:1\n" +
+ "a:1:32:" +
+ socialTrackingURL.length +
+ "\n" +
+ socialTrackingURL +
+ "\n";
+ let annotationEntitylistUpdate =
+ "n:1000\ni:" +
+ ANNOTATION_ENTITYLIST_TABLE_NAME +
+ "\nad:1\n" +
+ "a:1:32:" +
+ annotationEntitylistedURL.length +
+ "\n" +
+ annotationEntitylistedURL +
+ "\n";
+ let trackingUpdate =
+ "n:1000\ni:" +
+ TRACKING_TABLE_NAME +
+ "\nad:3\n" +
+ "a:1:32:" +
+ trackingURL1.length +
+ "\n" +
+ trackingURL1 +
+ "\n" +
+ "a:2:32:" +
+ trackingURL2.length +
+ "\n" +
+ trackingURL2 +
+ "\n" +
+ "a:3:32:" +
+ trackingURL3.length +
+ "\n" +
+ trackingURL3 +
+ "\n";
+ let socialTrackingUpdate =
+ "n:1000\ni:" +
+ SOCIAL_TRACKING_TABLE_NAME +
+ "\nad:1\n" +
+ "a:1:32:" +
+ socialTrackingURL.length +
+ "\n" +
+ socialTrackingURL +
+ "\n";
+ let emailTrackingUpdate =
+ "n:1000\ni:" +
+ EMAIL_TRACKING_TABLE_NAME +
+ "\nad:1\n" +
+ "a:1:32:" +
+ emailTrackingURL.length +
+ "\n" +
+ emailTrackingURL +
+ "\n";
+ let entitylistUpdate =
+ "n:1000\ni:" +
+ ENTITYLIST_TABLE_NAME +
+ "\nad:1\n" +
+ "a:1:32:" +
+ entitylistedURL.length +
+ "\n" +
+ entitylistedURL +
+ "\n";
+
+ var tables = [
+ {
+ pref: ANNOTATION_TABLE_PREF,
+ name: ANNOTATION_TABLE_NAME,
+ update: annotationUpdate,
+ },
+ {
+ pref: SOCIAL_ANNOTATION_TABLE_PREF,
+ name: SOCIAL_ANNOTATION_TABLE_NAME,
+ update: socialAnnotationUpdate,
+ },
+ {
+ pref: ANNOTATION_ENTITYLIST_TABLE_PREF,
+ name: ANNOTATION_ENTITYLIST_TABLE_NAME,
+ update: annotationEntitylistUpdate,
+ },
+ {
+ pref: TRACKING_TABLE_PREF,
+ name: TRACKING_TABLE_NAME,
+ update: trackingUpdate,
+ },
+ {
+ pref: SOCIAL_TRACKING_TABLE_PREF,
+ name: SOCIAL_TRACKING_TABLE_NAME,
+ update: socialTrackingUpdate,
+ },
+ {
+ pref: EMAIL_TRACKING_TABLE_PREF,
+ name: EMAIL_TRACKING_TABLE_NAME,
+ update: emailTrackingUpdate,
+ },
+ {
+ pref: ENTITYLIST_TABLE_PREF,
+ name: ENTITYLIST_TABLE_NAME,
+ update: entitylistUpdate,
+ },
+ ];
+
+ let tableIndex = 0;
+ let doOneUpdate = () => {
+ if (tableIndex == tables.length) {
+ return Promise.resolve();
+ }
+ return this.useTestDatabase(tables[tableIndex]).then(
+ () => {
+ tableIndex++;
+ return doOneUpdate();
+ },
+ aErrMsg => {
+ dump("Rejected: " + aErrMsg + ". Retry later.\n");
+ return new Promise(resolve => {
+ timer.initWithCallback(resolve, 100, Ci.nsITimer.TYPE_ONE_SHOT);
+ }).then(doOneUpdate);
+ }
+ );
+ };
+
+ return doOneUpdate();
+ },
+
+ cleanupTestTrackers() {
+ Services.prefs.clearUserPref(ANNOTATION_TABLE_PREF);
+ Services.prefs.clearUserPref(SOCIAL_ANNOTATION_TABLE_PREF);
+ Services.prefs.clearUserPref(ANNOTATION_ENTITYLIST_TABLE_PREF);
+ Services.prefs.clearUserPref(TRACKING_TABLE_PREF);
+ Services.prefs.clearUserPref(SOCIAL_TRACKING_TABLE_PREF);
+ Services.prefs.clearUserPref(EMAIL_TRACKING_TABLE_PREF);
+ Services.prefs.clearUserPref(ENTITYLIST_TABLE_PREF);
+ },
+
+ /**
+ * Add some entries to a test tracking protection database, and resets
+ * back to the default database after the test ends.
+ *
+ * @return {Promise}
+ */
+ useTestDatabase(table) {
+ Services.prefs.setCharPref(table.pref, table.name);
+
+ return new Promise((resolve, reject) => {
+ let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+ );
+ let listener = {
+ QueryInterface: iid => {
+ if (
+ iid.equals(Ci.nsISupports) ||
+ iid.equals(Ci.nsIUrlClassifierUpdateObserver)
+ ) {
+ return listener;
+ }
+
+ throw Components.Exception("", Cr.NS_ERROR_NO_INTERFACE);
+ },
+ updateUrlRequested: url => {},
+ streamFinished: status => {},
+ updateError: errorCode => {
+ reject("Got updateError when updating " + table.name);
+ },
+ updateSuccess: requestedTimeout => {
+ resolve();
+ },
+ };
+
+ try {
+ dbService.beginUpdate(listener, table.name, "");
+ dbService.beginStream("", "");
+ dbService.updateStream(table.update);
+ dbService.finishStream();
+ dbService.finishUpdate();
+ } catch (e) {
+ reject("Failed to update with dbService: " + table.name);
+ }
+ });
+ },
+
+ /**
+ * Handle the next "urlclassifier-before-block-channel" event.
+ * @param {Object} options
+ * @param {String} [options.filterOrigin] - Only handle event for channels
+ * with matching origin.
+ * @param {function} [options.onBeforeBlockChannel] - Optional callback for
+ * the event. Called before acting on the channel.
+ * @param {("allow"|"replace")} [options.action] - Whether to allow or replace
+ * the channel.
+ * @returns {Promise} - Resolves once event has been handled.
+ */
+ handleBeforeBlockChannel({
+ filterOrigin = null,
+ onBeforeBlockChannel,
+ action,
+ }) {
+ if (action && action != "allow" && action != "replace") {
+ throw new Error("Invalid action " + action);
+ }
+ let channelClassifierService = Cc[
+ "@mozilla.org/url-classifier/channel-classifier-service;1"
+ ].getService(Ci.nsIChannelClassifierService);
+
+ let resolver;
+ let promise = new Promise(resolve => {
+ resolver = resolve;
+ });
+
+ let observer = {
+ observe(subject, topic) {
+ if (topic != "urlclassifier-before-block-channel") {
+ return;
+ }
+ let channel = subject.QueryInterface(Ci.nsIUrlClassifierBlockedChannel);
+
+ if (filterOrigin) {
+ let { url } = channel;
+ let { origin } = new URL(url);
+ if (filterOrigin != origin) {
+ return;
+ }
+ }
+
+ if (onBeforeBlockChannel) {
+ onBeforeBlockChannel(channel);
+ }
+ if (action) {
+ channel[action]();
+ }
+
+ channelClassifierService.removeListener(observer);
+ resolver();
+ },
+ };
+ channelClassifierService.addListener(observer);
+ return promise;
+ },
+};
diff --git a/toolkit/components/url-classifier/tests/browser/browser.toml b/toolkit/components/url-classifier/tests/browser/browser.toml
new file mode 100644
index 0000000000..cd6be59720
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/browser/browser.toml
@@ -0,0 +1,7 @@
+[DEFAULT]
+support-files = [
+ "page.html",
+ "raptor.jpg",
+]
+
+["browser_emailtracking_telemetry.js"]
diff --git a/toolkit/components/url-classifier/tests/browser/browser_emailtracking_telemetry.js b/toolkit/components/url-classifier/tests/browser/browser_emailtracking_telemetry.js
new file mode 100644
index 0000000000..086ca49afe
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/browser/browser_emailtracking_telemetry.js
@@ -0,0 +1,423 @@
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+let { UrlClassifierTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+
+const TEST_DOMAIN = "https://example.com/";
+const TEST_EMAIL_WEBAPP_DOMAIN = "https://test1.example.com/";
+const EMAIL_TRACKER_DOMAIN = "https://email-tracking.example.org/";
+const TEST_PATH = "browser/toolkit/components/url-classifier/tests/browser/";
+
+const TEST_PAGE = TEST_DOMAIN + TEST_PATH + "page.html";
+const TEST_EMAIL_WEBAPP_PAGE =
+ TEST_EMAIL_WEBAPP_DOMAIN + TEST_PATH + "page.html";
+
+const EMAIL_TRACKER_PAGE = EMAIL_TRACKER_DOMAIN + TEST_PATH + "page.html";
+const EMAIL_TRACKER_IMAGE = EMAIL_TRACKER_DOMAIN + TEST_PATH + "raptor.jpg";
+
+const TELEMETRY_EMAIL_TRACKER_COUNT = "EMAIL_TRACKER_COUNT";
+const TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB =
+ "EMAIL_TRACKER_EMBEDDED_PER_TAB";
+
+const LABEL_BASE_NORMAL = 0;
+const LABEL_CONTENT_NORMAL = 1;
+const LABEL_BASE_EMAIL_WEBAPP = 2;
+const LABEL_CONTENT_EMAIL_WEBAPP = 3;
+
+const KEY_BASE_NORMAL = "base_normal";
+const KEY_CONTENT_NORMAL = "content_normal";
+const KEY_ALL_NORMAL = "all_normal";
+const KEY_BASE_EMAILAPP = "base_emailapp";
+const KEY_CONTENT_EMAILAPP = "content_emailapp";
+const KEY_ALL_EMAILAPP = "all_emailapp";
+
+async function clearTelemetry() {
+ Services.telemetry.getSnapshotForHistograms("main", true /* clear */);
+ Services.telemetry.getHistogramById(TELEMETRY_EMAIL_TRACKER_COUNT).clear();
+ Services.telemetry
+ .getKeyedHistogramById(TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB)
+ .clear();
+}
+
+async function loadImage(browser, url) {
+ return SpecialPowers.spawn(browser, [url], page => {
+ return new Promise(resolve => {
+ let image = new content.Image();
+ image.src = page + "?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+ });
+}
+
+async function getTelemetryProbe(key, label, checkCntFn) {
+ let histogram;
+
+ // Wait until the telemetry probe appears.
+ await TestUtils.waitForCondition(() => {
+ let histograms = Services.telemetry.getSnapshotForHistograms(
+ "main",
+ false /* clear */
+ ).parent;
+
+ histogram = histograms[key];
+
+ let checkRes = false;
+
+ if (histogram) {
+ checkRes = checkCntFn ? checkCntFn(histogram.values[label]) : true;
+ }
+
+ return checkRes;
+ });
+
+ return histogram.values[label] || 0;
+}
+
+async function getKeyedHistogram(histogram_id, key, bucket, checkCntFn) {
+ let histogram;
+
+ // Wait until the telemetry probe appears.
+ await TestUtils.waitForCondition(() => {
+ let histograms = Services.telemetry.getSnapshotForKeyedHistograms(
+ "main",
+ false /* clear */
+ ).parent;
+
+ histogram = histograms[histogram_id];
+
+ let checkRes = false;
+
+ if (histogram && histogram[key]) {
+ checkRes = checkCntFn ? checkCntFn(histogram[key].values[bucket]) : true;
+ }
+
+ return checkRes;
+ });
+
+ return histogram[key].values[bucket] || 0;
+}
+
+async function checkTelemetryProbe(key, label, expectedCnt) {
+ let cnt = await getTelemetryProbe(key, label, cnt => {
+ if (cnt === undefined) {
+ cnt = 0;
+ }
+
+ return cnt == expectedCnt;
+ });
+
+ is(cnt, expectedCnt, "There should be expected count in telemetry.");
+}
+
+async function checkKeyedHistogram(histogram_id, key, bucket, expectedCnt) {
+ let cnt = await getKeyedHistogram(histogram_id, key, bucket, cnt => {
+ if (cnt === undefined) {
+ cnt = 0;
+ }
+
+ return cnt == expectedCnt;
+ });
+
+ is(cnt, expectedCnt, "There should be expected count in keyed telemetry.");
+}
+
+function checkNoTelemetryProbe(key) {
+ let histograms = Services.telemetry.getSnapshotForHistograms(
+ "main",
+ false /* clear */
+ ).parent;
+
+ let histogram = histograms[key];
+
+ ok(!histogram, `No Telemetry has been recorded for ${key}`);
+}
+
+add_setup(async function () {
+ await SpecialPowers.pushPrefEnv({
+ set: [
+ [
+ "urlclassifier.features.emailtracking.datacollection.blocklistTables",
+ "mochitest5-track-simple",
+ ],
+ [
+ "urlclassifier.features.emailtracking.datacollection.allowlistTables",
+ "",
+ ],
+ [
+ "urlclassifier.features.emailtracking.blocklistTables",
+ "mochitest5-track-simple",
+ ],
+ ["urlclassifier.features.emailtracking.allowlistTables", ""],
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.annotate_channels", false],
+ ["privacy.trackingprotection.cryptomining.enabled", false],
+ ["privacy.trackingprotection.emailtracking.enabled", true],
+ [
+ "privacy.trackingprotection.emailtracking.data_collection.enabled",
+ true,
+ ],
+ ["privacy.trackingprotection.fingerprinting.enabled", false],
+ ["privacy.trackingprotection.socialtracking.enabled", false],
+ [
+ "privacy.trackingprotection.emailtracking.webapp.domains",
+ "test1.example.com",
+ ],
+ ],
+ });
+
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ registerCleanupFunction(function () {
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ });
+
+ await clearTelemetry();
+});
+
+add_task(async function test_email_tracking_telemetry() {
+ // Open a non email webapp tab.
+ await BrowserTestUtils.withNewTab(TEST_PAGE, async browser => {
+ // Load a image from the email tracker
+ let res = await loadImage(browser, EMAIL_TRACKER_IMAGE);
+
+ is(res, false, "The image is blocked.");
+
+ // Verify the telemetry of the email tracker count.
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_BASE_NORMAL,
+ 1
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_CONTENT_NORMAL,
+ 0
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_BASE_EMAIL_WEBAPP,
+ 0
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_CONTENT_EMAIL_WEBAPP,
+ 0
+ );
+ });
+
+ // Open an email webapp tab.
+ await BrowserTestUtils.withNewTab(TEST_EMAIL_WEBAPP_PAGE, async browser => {
+ // Load a image from the email tracker
+ let res = await loadImage(browser, EMAIL_TRACKER_IMAGE);
+
+ is(res, false, "The image is blocked.");
+
+ // Verify the telemetry of the email tracker count.
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_BASE_NORMAL,
+ 1
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_CONTENT_NORMAL,
+ 0
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_BASE_EMAIL_WEBAPP,
+ 1
+ );
+ await checkTelemetryProbe(
+ TELEMETRY_EMAIL_TRACKER_COUNT,
+ LABEL_CONTENT_EMAIL_WEBAPP,
+ 0
+ );
+ });
+ // Make sure the tab was closed properly before clearing Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ await clearTelemetry();
+});
+
+add_task(async function test_no_telemetry_for_first_party_email_tracker() {
+ // Open a email tracker tab.
+ await BrowserTestUtils.withNewTab(EMAIL_TRACKER_PAGE, async browser => {
+ // Load a image from the first-party email tracker
+ let res = await loadImage(browser, EMAIL_TRACKER_IMAGE);
+
+ is(res, true, "The image is loaded.");
+
+ // Verify that there was no telemetry recorded.
+ checkNoTelemetryProbe(TELEMETRY_EMAIL_TRACKER_COUNT);
+ });
+ // Make sure the tab was closed properly before clearing Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ await clearTelemetry();
+});
+
+add_task(async function test_disable_email_data_collection() {
+ // Disable Email Tracking Data Collection.
+ await SpecialPowers.pushPrefEnv({
+ set: [
+ [
+ "privacy.trackingprotection.emailtracking.data_collection.enabled",
+ false,
+ ],
+ ],
+ });
+
+ // Open an email webapp tab.
+ await BrowserTestUtils.withNewTab(TEST_EMAIL_WEBAPP_PAGE, async browser => {
+ // Load a image from the email tracker
+ let res = await loadImage(browser, EMAIL_TRACKER_IMAGE);
+
+ is(res, false, "The image is blocked.");
+
+ // Verify that there was no telemetry recorded.
+ checkNoTelemetryProbe(TELEMETRY_EMAIL_TRACKER_COUNT);
+ });
+ // Make sure the tab was closed properly before clearing Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ await SpecialPowers.popPrefEnv();
+ await clearTelemetry();
+});
+
+add_task(async function test_email_tracker_embedded_telemetry() {
+ // First, we open a page without loading any email trackers.
+ await BrowserTestUtils.withNewTab(TEST_PAGE, async _ => {});
+ // Make sure the tab was closed properly before checking Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ // Check that the telemetry has been record properly for normal page. The
+ // telemetry should show there was no email tracker loaded.
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_BASE_NORMAL,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_CONTENT_NORMAL,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_NORMAL,
+ 0,
+ 1
+ );
+
+ // Second, Open a email webapp tab that doesn't a load email tracker.
+ await BrowserTestUtils.withNewTab(TEST_EMAIL_WEBAPP_PAGE, async _ => {});
+ // Make sure the tab was closed properly before checking Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ // Check that the telemetry has been record properly for the email webapp. The
+ // telemetry should show there was no email tracker loaded.
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_BASE_EMAILAPP,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_CONTENT_EMAILAPP,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_EMAILAPP,
+ 0,
+ 1
+ );
+
+ // Third, open a page with one email tracker loaded.
+ await BrowserTestUtils.withNewTab(TEST_PAGE, async browser => {
+ // Load a image from the email tracker
+ let res = await loadImage(browser, EMAIL_TRACKER_IMAGE);
+
+ is(res, false, "The image is blocked.");
+ });
+ // Make sure the tab was closed properly before checking Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ // Verify that the telemetry has been record properly, The telemetry should
+ // show there was one base email tracker loaded.
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_BASE_NORMAL,
+ 1,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_CONTENT_NORMAL,
+ 0,
+ 2
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_NORMAL,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_NORMAL,
+ 1,
+ 1
+ );
+
+ // Open a page and load the same email tracker multiple times. There
+ // should be only one count for the same tracker.
+ await BrowserTestUtils.withNewTab(TEST_PAGE, async browser => {
+ // Load a image from the email tracker two times.
+ await loadImage(browser, EMAIL_TRACKER_IMAGE);
+ await loadImage(browser, EMAIL_TRACKER_IMAGE);
+ });
+ // Make sure the tab was closed properly before checking Telemetry.
+ await BrowserUtils.promiseObserved("window-global-destroyed");
+
+ // Verify that there is still only one count when loading the same tracker
+ // multiple times.
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_BASE_NORMAL,
+ 1,
+ 2
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_CONTENT_NORMAL,
+ 0,
+ 3
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_NORMAL,
+ 0,
+ 1
+ );
+ await checkKeyedHistogram(
+ TELEMETRY_EMAIL_TRACKER_EMBEDDED_PER_TAB,
+ KEY_ALL_NORMAL,
+ 1,
+ 2
+ );
+
+ await clearTelemetry();
+});
diff --git a/toolkit/components/url-classifier/tests/browser/page.html b/toolkit/components/url-classifier/tests/browser/page.html
new file mode 100644
index 0000000000..a99e8be179
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/browser/page.html
@@ -0,0 +1,8 @@
+<html>
+<head>
+ <title>Just a top-level page</title>
+</head>
+<body>
+ <h1>This is the top-level page</h1>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/browser/raptor.jpg b/toolkit/components/url-classifier/tests/browser/raptor.jpg
new file mode 100644
index 0000000000..243ba9e2d4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/browser/raptor.jpg
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/gtest/Common.cpp b/toolkit/components/url-classifier/tests/gtest/Common.cpp
new file mode 100644
index 0000000000..172ded051f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/Common.cpp
@@ -0,0 +1,219 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Common.h"
+
+#include "Classifier.h"
+#include "HashStore.h"
+#include "mozilla/Components.h"
+#include "mozilla/SpinEventLoopUntil.h"
+#include "mozilla/Unused.h"
+#include "nsAppDirectoryServiceDefs.h"
+#include "nsIThread.h"
+#include "nsTArray.h"
+#include "nsThreadUtils.h"
+#include "nsUrlClassifierUtils.h"
+
+using namespace mozilla;
+using namespace mozilla::safebrowsing;
+
+nsresult SyncApplyUpdates(TableUpdateArray& aUpdates) {
+ // We need to spin a new thread specifically because the callback
+ // will be on the caller thread. If we call Classifier::AsyncApplyUpdates
+ // and wait on the same thread, this function will never return.
+
+ nsresult ret = NS_ERROR_FAILURE;
+ bool done = false;
+ auto onUpdateComplete = [&done, &ret](nsresult rv) {
+ // We are on the "ApplyUpdate" thread. Post an event to main thread
+ // so that we can avoid busy waiting on the main thread.
+ nsCOMPtr<nsIRunnable> r =
+ NS_NewRunnableFunction("SyncApplyUpdates", [&done, &ret, rv] {
+ ret = rv;
+ done = true;
+ });
+ NS_DispatchToMainThread(r);
+ };
+
+ nsCOMPtr<nsIFile> file;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+
+ nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction("SyncApplyUpdates", [&]() {
+ RefPtr<Classifier> classifier = new Classifier();
+ classifier->Open(*file);
+
+ nsresult rv = classifier->AsyncApplyUpdates(aUpdates, onUpdateComplete);
+ if (NS_FAILED(rv)) {
+ onUpdateComplete(rv);
+ }
+ });
+
+ nsCOMPtr<nsIThread> testingThread;
+ NS_NewNamedThread("ApplyUpdates", getter_AddRefs(testingThread));
+ if (!testingThread) {
+ return NS_ERROR_FAILURE;
+ }
+
+ testingThread->Dispatch(r, NS_DISPATCH_NORMAL);
+
+ // NS_NewCheckSummedOutputStream in HashStore::WriteFile
+ // will synchronously init NS_CRYPTO_HASH_CONTRACTID on
+ // the main thread. As a result we have to keep processing
+ // pending events until |done| becomes true. If there are no
+ // more pending events, all we can do is wait.
+ MOZ_ALWAYS_TRUE(SpinEventLoopUntil("url-classifier:SyncApplyUpdates"_ns,
+ [&]() { return done; }));
+
+ return ret;
+}
+
+already_AddRefed<nsIFile> GetFile(const nsTArray<nsString>& path) {
+ nsCOMPtr<nsIFile> file;
+ nsresult rv =
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ return nullptr;
+ }
+
+ for (uint32_t i = 0; i < path.Length(); i++) {
+ file->Append(path[i]);
+ }
+ return file.forget();
+}
+
+void ApplyUpdate(TableUpdateArray& updates) {
+ // Force nsUrlClassifierUtils loading on main thread
+ // because nsIUrlClassifierDBService will not run in advance
+ // in gtest.
+ nsUrlClassifierUtils::GetInstance();
+
+ SyncApplyUpdates(updates);
+}
+
+void ApplyUpdate(TableUpdate* update) {
+ TableUpdateArray updates = {update};
+ ApplyUpdate(updates);
+}
+
+nsresult PrefixArrayToPrefixStringMap(const _PrefixArray& aPrefixArray,
+ PrefixStringMap& aOut) {
+ aOut.Clear();
+
+ // Buckets are keyed by prefix length and contain an array of
+ // all prefixes of that length.
+ nsClassHashtable<nsUint32HashKey, _PrefixArray> table;
+ for (const auto& prefix : aPrefixArray) {
+ _PrefixArray* array = table.GetOrInsertNew(prefix.Length());
+ array->AppendElement(prefix);
+ }
+
+ // The resulting map entries will be a concatenation of all
+ // prefix data for the prefixes of a given size.
+ for (const auto& entry : table) {
+ uint32_t size = entry.GetKey();
+ uint32_t count = entry.GetData()->Length();
+
+ auto str = MakeUnique<_Prefix>();
+ str->SetLength(size * count);
+
+ char* dst = str->BeginWriting();
+
+ entry.GetData()->Sort();
+ for (uint32_t i = 0; i < count; i++) {
+ memcpy(dst, entry.GetData()->ElementAt(i).get(), size);
+ dst += size;
+ }
+
+ aOut.InsertOrUpdate(size, std::move(str));
+ }
+
+ return NS_OK;
+}
+
+nsresult PrefixArrayToAddPrefixArray(const _PrefixArray& aPrefixArray,
+ AddPrefixArray& aOut) {
+ aOut.Clear();
+
+ for (const auto& prefix : aPrefixArray) {
+ // Create prefix hash from string
+ AddPrefix* add = aOut.AppendElement(fallible);
+ if (!add) {
+ return NS_ERROR_OUT_OF_MEMORY;
+ }
+
+ add->addChunk = 1;
+ add->prefix.Assign(prefix);
+ }
+
+ EntrySort(aOut);
+
+ return NS_OK;
+}
+
+_Prefix CreatePrefixFromURL(const char* aURL, uint8_t aPrefixSize) {
+ return CreatePrefixFromURL(nsCString(aURL), aPrefixSize);
+}
+
+_Prefix CreatePrefixFromURL(const nsCString& aURL, uint8_t aPrefixSize) {
+ Completion complete;
+ complete.FromPlaintext(aURL);
+
+ _Prefix prefix;
+ prefix.Assign((const char*)complete.buf, aPrefixSize);
+ return prefix;
+}
+
+void CheckContent(LookupCacheV4* aCache, const _PrefixArray& aPrefixArray) {
+ PrefixStringMap vlPSetMap;
+ aCache->GetPrefixes(vlPSetMap);
+
+ PrefixStringMap expected;
+ PrefixArrayToPrefixStringMap(aPrefixArray, expected);
+
+ for (const auto& entry : vlPSetMap) {
+ nsCString* expectedPrefix = expected.Get(entry.GetKey());
+ nsCString* resultPrefix = entry.GetWeak();
+
+ ASSERT_TRUE(resultPrefix->Equals(*expectedPrefix));
+ }
+}
+
+nsresult BuildLookupCache(const RefPtr<Classifier>& classifier,
+ const nsACString& aTable,
+ _PrefixArray& aPrefixArray) {
+ RefPtr<LookupCache> cache = classifier->GetLookupCache(aTable, false);
+ if (!cache) {
+ return NS_ERROR_FAILURE;
+ }
+
+ if (LookupCache::Cast<LookupCacheV4>(cache)) {
+ // V4
+ RefPtr<LookupCacheV4> cacheV4 = LookupCache::Cast<LookupCacheV4>(cache);
+
+ PrefixStringMap map;
+ PrefixArrayToPrefixStringMap(aPrefixArray, map);
+ return cacheV4->Build(map);
+ } else {
+ // V2
+ RefPtr<LookupCacheV2> cacheV2 = LookupCache::Cast<LookupCacheV2>(cache);
+
+ AddPrefixArray addPrefixes;
+ AddCompleteArray addComples;
+
+ PrefixArrayToAddPrefixArray(aPrefixArray, addPrefixes);
+ return cacheV2->Build(addPrefixes, addComples);
+ }
+}
+
+RefPtr<Classifier> GetClassifier() {
+ nsCOMPtr<nsIFile> file;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+
+ RefPtr<Classifier> classifier = new Classifier();
+ nsresult rv = classifier->Open(*file);
+ EXPECT_TRUE(rv == NS_OK);
+
+ return classifier;
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/Common.h b/toolkit/components/url-classifier/tests/gtest/Common.h
new file mode 100644
index 0000000000..9c196abafa
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/Common.h
@@ -0,0 +1,147 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef nsUrlClassifierGTestCommon_h__
+#define nsUrlClassifierGTestCommon_h__
+
+#include "Entries.h"
+#include "nsAppDirectoryServiceDefs.h"
+#include "nsIFile.h"
+#include "nsTArray.h"
+#include "nsIThread.h"
+#include "nsThreadUtils.h"
+#include "HashStore.h"
+
+#include "gtest/gtest.h"
+#include "mozilla/gtest/MozAssertions.h"
+#include "LookupCacheV4.h"
+
+using namespace mozilla::safebrowsing;
+
+namespace mozilla {
+namespace safebrowsing {
+class Classifier;
+class LookupCacheV4;
+class TableUpdate;
+} // namespace safebrowsing
+} // namespace mozilla
+
+#define GTEST_SAFEBROWSING_DIR "safebrowsing"_ns
+#define GTEST_TABLE_V4 "gtest-malware-proto"_ns
+#define GTEST_TABLE_V2 "gtest-malware-simple"_ns
+
+template <typename Function>
+void RunTestInNewThread(Function&& aFunction) {
+ nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
+ "RunTestInNewThread", std::forward<Function>(aFunction));
+ nsCOMPtr<nsIThread> testingThread;
+ nsresult rv =
+ NS_NewNamedThread("Testing Thread", getter_AddRefs(testingThread), r);
+ ASSERT_EQ(rv, NS_OK);
+ testingThread->Shutdown();
+}
+
+// Synchronously apply updates by calling Classifier::AsyncApplyUpdates.
+nsresult SyncApplyUpdates(Classifier* aClassifier,
+ nsTArray<TableUpdate*>* aUpdates);
+nsresult SyncApplyUpdates(TableUpdateArray& aUpdates);
+
+// Return nsIFile with root directory - NS_APP_USER_PROFILE_50_DIR
+// Sub-directories are passed in path argument.
+already_AddRefed<nsIFile> GetFile(const nsTArray<nsString>& aPath);
+
+// ApplyUpdate will call |ApplyUpdates| of Classifier within a new thread
+void ApplyUpdate(nsTArray<TableUpdate*>& aUpdates);
+
+void ApplyUpdate(TableUpdate* aUpdate);
+
+/**
+ * Prefix processing utility functions
+ */
+
+typedef nsCString _Prefix;
+typedef nsTArray<nsCString> _PrefixArray;
+
+// This function converts a lexicographically-sorted prefix array
+// to a hash table keyed by prefix size (PrefixStringMap is defined in Entries.h)
+nsresult PrefixArrayToPrefixStringMap(const _PrefixArray& aPrefixArray,
+ PrefixStringMap& aOut);
+
+// This function converts a lexicographically-sorted prefix array
+// to an array containing AddPrefix (AddPrefix is defined in Entries.h)
+nsresult PrefixArrayToAddPrefixArray(const _PrefixArray& aPrefixArray,
+ AddPrefixArray& aOut);
+
+_Prefix CreatePrefixFromURL(const char* aURL, uint8_t aPrefixSize);
+
+_Prefix CreatePrefixFromURL(const nsCString& aURL, uint8_t aPrefixSize);
+
+// To test if the content is equal
+void CheckContent(LookupCacheV4* cache, const _PrefixArray& aPrefixArray);
+
+/**
+ * Utility function to generate safebrowsing internal structure
+ */
+
+static inline nsresult BuildCache(LookupCacheV2* cache,
+ const _PrefixArray& aPrefixArray) {
+ AddPrefixArray prefixes;
+ AddCompleteArray completions;
+ nsresult rv = PrefixArrayToAddPrefixArray(aPrefixArray, prefixes);
+ if (NS_FAILED(rv)) {
+ return rv;
+ }
+
+ return cache->Build(prefixes, completions);
+}
+
+static inline nsresult BuildCache(LookupCacheV4* cache,
+ const _PrefixArray& aPrefixArray) {
+ PrefixStringMap map;
+ PrefixArrayToPrefixStringMap(aPrefixArray, map);
+ return cache->Build(map);
+}
+
+// Create a lookup cache object (V2 or V4) with the specified prefix array.
+template <typename T>
+RefPtr<T> SetupLookupCache(const _PrefixArray& aPrefixArray,
+ nsCOMPtr<nsIFile>& aFile) {
+ RefPtr<T> cache = new T(GTEST_TABLE_V4, ""_ns, aFile);
+
+ nsresult rv = cache->Init();
+ EXPECT_EQ(rv, NS_OK);
+
+ rv = BuildCache(cache, aPrefixArray);
+ EXPECT_EQ(rv, NS_OK);
+
+ return cache;
+}
+
+template <typename T>
+RefPtr<T> SetupLookupCache(const _PrefixArray& aPrefixArray) {
+ nsCOMPtr<nsIFile> file;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+
+ file->AppendNative(GTEST_SAFEBROWSING_DIR);
+
+ RefPtr<T> cache = new T(GTEST_TABLE_V4, ""_ns, file);
+ nsresult rv = cache->Init();
+ EXPECT_EQ(rv, NS_OK);
+
+ rv = BuildCache(cache, aPrefixArray);
+ EXPECT_EQ(rv, NS_OK);
+
+ return cache;
+}
+
+/**
+ * Retrieve Classifer class
+ */
+RefPtr<Classifier> GetClassifier();
+
+nsresult BuildLookupCache(const RefPtr<Classifier>& aClassifier,
+ const nsACString& aTable, _PrefixArray& aPrefixArray);
+
+#endif // nsUrlClassifierGTestCommon_h__
diff --git a/toolkit/components/url-classifier/tests/gtest/TestCaching.cpp b/toolkit/components/url-classifier/tests/gtest/TestCaching.cpp
new file mode 100644
index 0000000000..520eb35dcb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestCaching.cpp
@@ -0,0 +1,271 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Common.h"
+#include "LookupCacheV4.h"
+
+#define EXPIRED_TIME_SEC (PR_Now() / PR_USEC_PER_SEC - 3600)
+#define NOTEXPIRED_TIME_SEC (PR_Now() / PR_USEC_PER_SEC + 3600)
+
+#define CACHED_URL "cache.com/"_ns
+#define NEG_CACHE_EXPIRED_URL "cache.negExpired.com/"_ns
+#define POS_CACHE_EXPIRED_URL "cache.posExpired.com/"_ns
+#define BOTH_CACHE_EXPIRED_URL "cache.negAndposExpired.com/"_ns
+
+static void SetupCacheEntry(LookupCacheV2* aLookupCache,
+ const nsCString& aCompletion,
+ bool aNegExpired = false,
+ bool aPosExpired = false) {
+ AddCompleteArray completes;
+ AddCompleteArray emptyCompletes;
+ MissPrefixArray misses;
+ MissPrefixArray emptyMisses;
+
+ AddComplete* add = completes.AppendElement(mozilla::fallible);
+ add->complete.FromPlaintext(aCompletion);
+
+ Prefix* prefix = misses.AppendElement(mozilla::fallible);
+ prefix->FromPlaintext(aCompletion);
+
+ // Set up the positive cache first, otherwise the negative cache expiry
+ // will be overwritten.
+ int64_t posExpirySec = aPosExpired ? EXPIRED_TIME_SEC : NOTEXPIRED_TIME_SEC;
+ aLookupCache->AddGethashResultToCache(completes, emptyMisses, posExpirySec);
+
+ int64_t negExpirySec = aNegExpired ? EXPIRED_TIME_SEC : NOTEXPIRED_TIME_SEC;
+ aLookupCache->AddGethashResultToCache(emptyCompletes, misses, negExpirySec);
+}
+
+static void SetupCacheEntry(LookupCacheV4* aLookupCache,
+ const nsCString& aCompletion,
+ bool aNegExpired = false,
+ bool aPosExpired = false) {
+ FullHashResponseMap map;
+
+ Prefix prefix;
+ prefix.FromPlaintext(aCompletion);
+
+ CachedFullHashResponse* response = map.GetOrInsertNew(prefix.ToUint32());
+
+ response->negativeCacheExpirySec =
+ aNegExpired ? EXPIRED_TIME_SEC : NOTEXPIRED_TIME_SEC;
+ response->fullHashes.InsertOrUpdate(
+ CreatePrefixFromURL(aCompletion, COMPLETE_SIZE),
+ aPosExpired ? EXPIRED_TIME_SEC : NOTEXPIRED_TIME_SEC);
+
+ aLookupCache->AddFullHashResponseToCache(map);
+}
+
+template <typename T>
+static void TestCache(const Completion aCompletion, bool aExpectedHas,
+ bool aExpectedConfirmed, bool aExpectedInCache,
+ T* aCache = nullptr) {
+ bool has, inCache, confirmed;
+ uint32_t matchLength;
+
+ if (aCache) {
+ aCache->Has(aCompletion, &has, &matchLength, &confirmed);
+ inCache = aCache->IsInCache(aCompletion.ToUint32());
+ } else {
+ _PrefixArray array = {CreatePrefixFromURL("cache.notexpired.com/", 10),
+ CreatePrefixFromURL("cache.expired.com/", 8),
+ CreatePrefixFromURL("gound.com/", 5),
+ CreatePrefixFromURL("small.com/", 4)};
+
+ RefPtr<T> cache = SetupLookupCache<T>(array);
+
+ // Create an expired entry and a non-expired entry
+ SetupCacheEntry(cache, "cache.notexpired.com/"_ns);
+ SetupCacheEntry(cache, "cache.expired.com/"_ns, true, true);
+
+ cache->Has(aCompletion, &has, &matchLength, &confirmed);
+ inCache = cache->IsInCache(aCompletion.ToUint32());
+ }
+
+ EXPECT_EQ(has, aExpectedHas);
+ EXPECT_EQ(confirmed, aExpectedConfirmed);
+ EXPECT_EQ(inCache, aExpectedInCache);
+}
+
+template <typename T>
+static void TestCache(const nsCString& aURL, bool aExpectedHas,
+ bool aExpectedConfirmed, bool aExpectedInCache,
+ T* aCache = nullptr) {
+ Completion lookupHash;
+ lookupHash.FromPlaintext(aURL);
+
+ TestCache<T>(lookupHash, aExpectedHas, aExpectedConfirmed, aExpectedInCache,
+ aCache);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// cannot match any prefix in the local database.
+TEST(UrlClassifierCaching, NotFound)
+{
+ TestCache<LookupCacheV2>("nomatch.com/"_ns, false, false, false);
+ TestCache<LookupCacheV4>("nomatch.com/"_ns, false, false, false);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// finds a match in the local database but not in the cache.
+TEST(UrlClassifierCaching, NotInCache)
+{
+ TestCache<LookupCacheV2>("gound.com/"_ns, true, false, false);
+ TestCache<LookupCacheV4>("gound.com/"_ns, true, false, false);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// matches a cache entry in the positive cache.
+TEST(UrlClassifierCaching, InPositiveCacheNotExpired)
+{
+ TestCache<LookupCacheV2>("cache.notexpired.com/"_ns, true, true, true);
+ TestCache<LookupCacheV4>("cache.notexpired.com/"_ns, true, true, true);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// matches a cache entry in the positive cache but the entry is expired.
+TEST(UrlClassifierCaching, InPositiveCacheExpired)
+{
+ TestCache<LookupCacheV2>("cache.expired.com/"_ns, true, false, true);
+ TestCache<LookupCacheV4>("cache.expired.com/"_ns, true, false, true);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// matches a cache entry in the negative cache.
+TEST(UrlClassifierCaching, InNegativeCacheNotExpired)
+{
+ // Create a fullhash whose prefix matches the prefix in negative cache
+ // but completion doesn't match any fullhash in positive cache.
+
+ Completion prefix;
+ prefix.FromPlaintext("cache.notexpired.com/"_ns);
+
+ Completion fullhash;
+ fullhash.FromPlaintext("firefox.com/"_ns);
+
+ // Overwrite the 4-byte prefix of `fullhash` so that it conflicts with
+ // `prefix`. Since "cache.notexpired.com" is added to database in TestCache as
+ // a 10-byte prefix, we should copy more than 10 bytes to fullhash to ensure
+ // it can match the prefix in database.
+ memcpy(fullhash.buf, prefix.buf, 10);
+
+ TestCache<LookupCacheV2>(fullhash, false, false, true);
+ TestCache<LookupCacheV4>(fullhash, false, false, true);
+}
+
+// This testcase checks the returned result of the |Has| API if the fullhash
+// matches a cache entry in the negative cache but that entry is expired.
+TEST(UrlClassifierCaching, InNegativeCacheExpired)
+{
+ // Create a fullhash whose prefix is in the cache.
+
+ Completion prefix;
+ prefix.FromPlaintext("cache.expired.com/"_ns);
+
+ Completion fullhash;
+ fullhash.FromPlaintext("firefox.com/"_ns);
+
+ memcpy(fullhash.buf, prefix.buf, 10);
+
+ TestCache<LookupCacheV2>(fullhash, true, false, true);
+ TestCache<LookupCacheV4>(fullhash, true, false, true);
+}
+
+// This testcase creates 4 cache entries:
+// 1. an unexpired entry.
+// 2. an entry whose negative cache time is expired but whose positive cache
+//    is not expired.
+// 3. an entry whose positive cache time is expired.
+// 4. an entry whose negative cache time and positive cache time are expired.
+// After calling the |InvalidateExpiredCacheEntry| API, entries with an expired
+// negative time should be removed from the cache (2 & 4).
+template <typename T>
+void TestInvalidateExpiredCacheEntry() {
+ _PrefixArray array = {CreatePrefixFromURL(CACHED_URL, 10),
+ CreatePrefixFromURL(NEG_CACHE_EXPIRED_URL, 8),
+ CreatePrefixFromURL(POS_CACHE_EXPIRED_URL, 5),
+ CreatePrefixFromURL(BOTH_CACHE_EXPIRED_URL, 4)};
+ RefPtr<T> cache = SetupLookupCache<T>(array);
+
+ SetupCacheEntry(cache, CACHED_URL, false, false);
+ SetupCacheEntry(cache, NEG_CACHE_EXPIRED_URL, true, false);
+ SetupCacheEntry(cache, POS_CACHE_EXPIRED_URL, false, true);
+ SetupCacheEntry(cache, BOTH_CACHE_EXPIRED_URL, true, true);
+
+ // Before invalidate
+ TestCache<T>(CACHED_URL, true, true, true, cache.get());
+ TestCache<T>(NEG_CACHE_EXPIRED_URL, true, true, true, cache.get());
+ TestCache<T>(POS_CACHE_EXPIRED_URL, true, false, true, cache.get());
+ TestCache<T>(BOTH_CACHE_EXPIRED_URL, true, false, true, cache.get());
+
+ // Call InvalidateExpiredCacheEntry to remove cache entries whose negative
+ // cache time is expired
+ cache->InvalidateExpiredCacheEntries();
+
+ // After invalidate, NEG_CACHE_EXPIRED_URL & BOTH_CACHE_EXPIRED_URL should
+ // not be found in cache.
+ TestCache<T>(NEG_CACHE_EXPIRED_URL, true, false, false, cache.get());
+ TestCache<T>(BOTH_CACHE_EXPIRED_URL, true, false, false, cache.get());
+
+ // Other entries should remain the same result.
+ TestCache<T>(CACHED_URL, true, true, true, cache.get());
+ TestCache<T>(POS_CACHE_EXPIRED_URL, true, false, true, cache.get());
+}
+
+TEST(UrlClassifierCaching, InvalidateExpiredCacheEntryV2)
+{ TestInvalidateExpiredCacheEntry<LookupCacheV2>(); }
+
+TEST(UrlClassifierCaching, InvalidateExpiredCacheEntryV4)
+{ TestInvalidateExpiredCacheEntry<LookupCacheV4>(); }
+
+// This testcase checks that a cache entry whose negative cache time is expired,
+// and which doesn't have any positive cache entries in it, is removed
+// from the cache after calling |Has|.
+TEST(UrlClassifierCaching, NegativeCacheExpireV2)
+{
+ _PrefixArray array = {CreatePrefixFromURL(NEG_CACHE_EXPIRED_URL, 8)};
+ RefPtr<LookupCacheV2> cache = SetupLookupCache<LookupCacheV2>(array);
+
+ nsCOMPtr<nsICryptoHash> cryptoHash =
+ do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID);
+
+ MissPrefixArray misses;
+ Prefix* prefix = misses.AppendElement(mozilla::fallible);
+ prefix->FromPlaintext(NEG_CACHE_EXPIRED_URL);
+
+ AddCompleteArray dummy;
+ cache->AddGethashResultToCache(dummy, misses, EXPIRED_TIME_SEC);
+
+ // Ensure it is in cache in the first place.
+ EXPECT_EQ(cache->IsInCache(prefix->ToUint32()), true);
+
+ // It should be removed after calling Has API.
+ TestCache<LookupCacheV2>(NEG_CACHE_EXPIRED_URL, true, false, false,
+ cache.get());
+}
+
+TEST(UrlClassifierCaching, NegativeCacheExpireV4)
+{
+ _PrefixArray array = {CreatePrefixFromURL(NEG_CACHE_EXPIRED_URL, 8)};
+ RefPtr<LookupCacheV4> cache = SetupLookupCache<LookupCacheV4>(array);
+
+ FullHashResponseMap map;
+ Prefix prefix;
+ nsCOMPtr<nsICryptoHash> cryptoHash =
+ do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID);
+ prefix.FromPlaintext(NEG_CACHE_EXPIRED_URL);
+ CachedFullHashResponse* response = map.GetOrInsertNew(prefix.ToUint32());
+
+ response->negativeCacheExpirySec = EXPIRED_TIME_SEC;
+
+ cache->AddFullHashResponseToCache(map);
+
+ // Ensure it is in cache in the first place.
+ EXPECT_EQ(cache->IsInCache(prefix.ToUint32()), true);
+
+ // It should be removed after calling Has API.
+ TestCache<LookupCacheV4>(NEG_CACHE_EXPIRED_URL, true, false, false,
+ cache.get());
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp
new file mode 100644
index 0000000000..6835103b30
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestChunkSet.cpp
@@ -0,0 +1,281 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <set>
+
+#include "ChunkSet.h"
+#include "mozilla/ArrayUtils.h"
+
+#include "Common.h"
+
+TEST(UrlClassifierChunkSet, Empty)
+{
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet removeSet;
+
+ removeSet.Set(0);
+
+ ASSERT_FALSE(chunkSet.Has(0));
+ ASSERT_FALSE(chunkSet.Has(1));
+ ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
+ ASSERT_TRUE(chunkSet.Length() == 0);
+
+ chunkSet.Set(0);
+
+ ASSERT_TRUE(chunkSet.Has(0));
+ ASSERT_TRUE(chunkSet.Length() == 1);
+ ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
+ ASSERT_FALSE(chunkSet.Has(0));
+ ASSERT_TRUE(chunkSet.Length() == 0);
+}
+
+TEST(UrlClassifierChunkSet, Main)
+{
+ static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
+
+ mozilla::safebrowsing::ChunkSet chunkSet;
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ chunkSet.Set(testVals[i]);
+ }
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(testVals[i]));
+ }
+
+ ASSERT_FALSE(chunkSet.Has(3));
+ ASSERT_FALSE(chunkSet.Has(4));
+ ASSERT_FALSE(chunkSet.Has(9));
+ ASSERT_FALSE(chunkSet.Has(11));
+
+ ASSERT_TRUE(chunkSet.Length() == MOZ_ARRAY_LENGTH(testVals));
+}
+
+TEST(UrlClassifierChunkSet, Merge)
+{
+ static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
+ static int mergeVals[] = {9, 3, 4, 20, 14, 16};
+
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet mergeSet;
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ chunkSet.Set(testVals[i]);
+ }
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ mergeSet.Set(mergeVals[i]);
+ }
+
+ chunkSet.Merge(mergeSet);
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(testVals[i]));
+ }
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
+ }
+
+ // -1 because 14 is duplicated in both sets
+ ASSERT_TRUE(chunkSet.Length() ==
+ MOZ_ARRAY_LENGTH(testVals) + MOZ_ARRAY_LENGTH(mergeVals) - 1);
+
+ ASSERT_FALSE(chunkSet.Has(11));
+ ASSERT_FALSE(chunkSet.Has(15));
+ ASSERT_FALSE(chunkSet.Has(17));
+ ASSERT_FALSE(chunkSet.Has(18));
+ ASSERT_FALSE(chunkSet.Has(19));
+}
+
+TEST(UrlClassifierChunkSet, Merge2)
+{
+ static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
+ static int mergeVals[] = {9, 3, 4, 20, 14, 16};
+ static int mergeVals2[] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11};
+
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet mergeSet;
+ mozilla::safebrowsing::ChunkSet mergeSet2;
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ chunkSet.Set(testVals[i]);
+ }
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ mergeSet.Set(mergeVals[i]);
+ }
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) {
+ mergeSet2.Set(mergeVals2[i]);
+ }
+
+ chunkSet.Merge(mergeSet);
+ chunkSet.Merge(mergeSet2);
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(testVals[i]));
+ }
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
+ }
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals2); i++) {
+ ASSERT_TRUE(chunkSet.Has(mergeVals2[i]));
+ }
+
+ ASSERT_FALSE(chunkSet.Has(15));
+ ASSERT_FALSE(chunkSet.Has(17));
+ ASSERT_FALSE(chunkSet.Has(18));
+ ASSERT_FALSE(chunkSet.Has(19));
+}
+
+TEST(UrlClassifierChunkSet, Stress)
+{
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet mergeSet;
+ std::set<int> refSet;
+ std::set<int> refMergeSet;
+ static const int TEST_ITERS = 7000;
+ static const int REMOVE_ITERS = 3000;
+ static const int TEST_RANGE = 10000;
+
+ // Construction by Set
+ for (int i = 0; i < TEST_ITERS; i++) {
+ int chunk = rand() % TEST_RANGE;
+ chunkSet.Set(chunk);
+ refSet.insert(chunk);
+ }
+
+ // Same elements as reference set
+ for (auto it = refSet.begin(); it != refSet.end(); ++it) {
+ ASSERT_TRUE(chunkSet.Has(*it));
+ }
+
+ // Hole punching via Remove
+ for (int i = 0; i < REMOVE_ITERS; i++) {
+ int chunk = rand() % TEST_RANGE;
+ mozilla::safebrowsing::ChunkSet helpChunk;
+ helpChunk.Set(chunk);
+
+ chunkSet.Remove(helpChunk);
+ refSet.erase(chunk);
+
+ ASSERT_FALSE(chunkSet.Has(chunk));
+ }
+
+ // Should have chunks present in reference set
+ // Should not have chunks absent in reference set
+ for (int it = 0; it < TEST_RANGE; ++it) {
+ auto found = refSet.find(it);
+ if (chunkSet.Has(it)) {
+ ASSERT_FALSE(found == refSet.end());
+ } else {
+ ASSERT_TRUE(found == refSet.end());
+ }
+ }
+
+ // Construct set to merge with
+ for (int i = 0; i < TEST_ITERS; i++) {
+ int chunk = rand() % TEST_RANGE;
+ mergeSet.Set(chunk);
+ refMergeSet.insert(chunk);
+ }
+
+ // Merge set constructed correctly
+ for (auto it = refMergeSet.begin(); it != refMergeSet.end(); ++it) {
+ ASSERT_TRUE(mergeSet.Has(*it));
+ }
+
+ mozilla::safebrowsing::ChunkSet origSet;
+ origSet = chunkSet.InfallibleClone();
+
+ chunkSet.Merge(mergeSet);
+ refSet.insert(refMergeSet.begin(), refMergeSet.end());
+
+ // Check for presence of elements from both source
+ // Should not have chunks absent in reference set
+ for (int it = 0; it < TEST_RANGE; ++it) {
+ auto found = refSet.find(it);
+ if (chunkSet.Has(it)) {
+ ASSERT_FALSE(found == refSet.end());
+ } else {
+ ASSERT_TRUE(found == refSet.end());
+ }
+ }
+
+ // Unmerge
+ chunkSet.Remove(origSet);
+ for (int it = 0; it < TEST_RANGE; ++it) {
+ if (origSet.Has(it)) {
+ ASSERT_FALSE(chunkSet.Has(it));
+ } else if (mergeSet.Has(it)) {
+ ASSERT_TRUE(chunkSet.Has(it));
+ }
+ }
+}
+
+TEST(UrlClassifierChunkSet, RemoveClear)
+{
+ static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
+ static int mergeVals[] = {3, 4, 9, 16, 20};
+
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet mergeSet;
+ mozilla::safebrowsing::ChunkSet removeSet;
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ chunkSet.Set(testVals[i]);
+ removeSet.Set(testVals[i]);
+ }
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ mergeSet.Set(mergeVals[i]);
+ }
+
+ ASSERT_TRUE(chunkSet.Merge(mergeSet) == NS_OK);
+ ASSERT_TRUE(chunkSet.Remove(removeSet) == NS_OK);
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ ASSERT_TRUE(chunkSet.Has(mergeVals[i]));
+ }
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ ASSERT_FALSE(chunkSet.Has(testVals[i]));
+ }
+
+ chunkSet.Clear();
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ ASSERT_FALSE(chunkSet.Has(mergeVals[i]));
+ }
+}
+
+TEST(UrlClassifierChunkSet, Serialize)
+{
+ static int testVals[] = {2, 1, 5, 6, 8, 7, 14, 10, 12, 13};
+ static int mergeVals[] = {3, 4, 9, 16, 20};
+
+ mozilla::safebrowsing::ChunkSet chunkSet;
+ mozilla::safebrowsing::ChunkSet mergeSet;
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(testVals); i++) {
+ chunkSet.Set(testVals[i]);
+ }
+
+ for (size_t i = 0; i < MOZ_ARRAY_LENGTH(mergeVals); i++) {
+ mergeSet.Set(mergeVals[i]);
+ }
+
+ chunkSet.Merge(mergeSet);
+
+ nsAutoCString mergeResult;
+ chunkSet.Serialize(mergeResult);
+
+ printf("mergeResult: %s\n", mergeResult.get());
+
+ nsAutoCString expected("1-10,12-14,16,20"_ns);
+
+ ASSERT_TRUE(mergeResult.Equals(expected));
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestClassifier.cpp b/toolkit/components/url-classifier/tests/gtest/TestClassifier.cpp
new file mode 100644
index 0000000000..4000d6b32e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestClassifier.cpp
@@ -0,0 +1,83 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Classifier.h"
+#include "LookupCacheV4.h"
+
+#include "Common.h"
+
+static void TestReadNoiseEntries(RefPtr<Classifier> classifier,
+ const nsCString& aTable, const nsCString& aURL,
+ const _PrefixArray& aPrefixArray) {
+ Completion lookupHash;
+ lookupHash.FromPlaintext(aURL);
+ RefPtr<LookupResult> result = new LookupResult;
+ result->hash.complete = lookupHash;
+
+ PrefixArray noiseEntries;
+ uint32_t noiseCount = 3;
+ nsresult rv;
+ rv = classifier->ReadNoiseEntries(result->hash.fixedLengthPrefix, aTable,
+ noiseCount, noiseEntries);
+ ASSERT_TRUE(rv == NS_OK)
+ << "ReadNoiseEntries returns an error";
+ EXPECT_TRUE(noiseEntries.Length() > 0)
+ << "Number of noise entries is less than 0";
+
+ for (uint32_t i = 0; i < noiseEntries.Length(); i++) {
+ // Test the noise entry should not equal the "real" hash request
+ EXPECT_NE(noiseEntries[i], result->hash.fixedLengthPrefix)
+ << "Noise entry is the same as real hash request";
+ // Test the noise entry should exist in the cached prefix array
+ nsAutoCString partialHash;
+ partialHash.Assign(reinterpret_cast<char*>(&noiseEntries[i]), PREFIX_SIZE);
+ EXPECT_TRUE(aPrefixArray.Contains(partialHash))
+ << "Noise entry is not in the cached prefix array";
+ }
+}
+
+TEST(UrlClassifierClassifier, ReadNoiseEntriesV4)
+{
+ RefPtr<Classifier> classifier = GetClassifier();
+ _PrefixArray array = {
+ CreatePrefixFromURL("bravo.com/", 5),
+ CreatePrefixFromURL("browsing.com/", 9),
+ CreatePrefixFromURL("gound.com/", 4),
+ CreatePrefixFromURL("small.com/", 4),
+ CreatePrefixFromURL("gdfad.com/", 4),
+ CreatePrefixFromURL("afdfound.com/", 4),
+ CreatePrefixFromURL("dffa.com/", 4),
+ };
+
+ nsresult rv;
+ rv = BuildLookupCache(classifier, GTEST_TABLE_V4, array);
+ ASSERT_TRUE(rv == NS_OK)
+ << "Fail to build LookupCache";
+
+ TestReadNoiseEntries(classifier, GTEST_TABLE_V4, "gound.com/"_ns, array);
+}
+
+TEST(UrlClassifierClassifier, ReadNoiseEntriesV2)
+{
+ RefPtr<Classifier> classifier = GetClassifier();
+ _PrefixArray array = {
+ CreatePrefixFromURL("helloworld.com/", 4),
+ CreatePrefixFromURL("firefox.com/", 4),
+ CreatePrefixFromURL("chrome.com/", 4),
+ CreatePrefixFromURL("safebrowsing.com/", 4),
+ CreatePrefixFromURL("opera.com/", 4),
+ CreatePrefixFromURL("torbrowser.com/", 4),
+ CreatePrefixFromURL("gfaads.com/", 4),
+ CreatePrefixFromURL("qggdsas.com/", 4),
+ CreatePrefixFromURL("nqtewq.com/", 4),
+ };
+
+ nsresult rv;
+ rv = BuildLookupCache(classifier, GTEST_TABLE_V2, array);
+ ASSERT_TRUE(rv == NS_OK)
+ << "Fail to build LookupCache";
+
+ TestReadNoiseEntries(classifier, GTEST_TABLE_V2, "helloworld.com/"_ns, array);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
new file mode 100644
index 0000000000..f94852a821
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestFailUpdate.cpp
@@ -0,0 +1,109 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "HashStore.h"
+#include "mozilla/Unused.h"
+#include "nsPrintfCString.h"
+#include "string.h"
+
+#include "Common.h"
+
+static const char* kFilesInV2[] = {".vlpset", ".sbstore"};
+static const char* kFilesInV4[] = {".vlpset", ".metadata"};
+
+#define GTEST_MALWARE_TABLE_V4 "goog-malware-proto"_ns
+#define GTEST_PHISH_TABLE_V4 "goog-phish-proto"_ns
+
+#define ROOT_DIR u"safebrowsing"_ns
+#define SB_FILE(x, y) NS_ConvertUTF8toUTF16(nsPrintfCString("%s%s", x, y))
+
+template <typename T, size_t N>
+static void CheckFileExist(const nsCString& aTable, const T (&aFiles)[N],
+ bool aExpectExists, const char* aMsg = nullptr) {
+ for (uint32_t i = 0; i < N; i++) {
+    // This is just a quick way to know if this is a v4 table
+ NS_ConvertUTF8toUTF16 SUB_DIR(strstr(aTable.get(), "-proto") ? "google4"
+ : "");
+ nsCOMPtr<nsIFile> file = GetFile(nsTArray<nsString>{
+ ROOT_DIR, SUB_DIR, SB_FILE(aTable.get(), aFiles[i])});
+
+ bool exists;
+ file->Exists(&exists);
+
+ if (aMsg) {
+ ASSERT_EQ(aExpectExists, exists)
+ << file->HumanReadablePath().get() << " " << aMsg;
+ } else {
+ ASSERT_EQ(aExpectExists, exists) << file->HumanReadablePath().get();
+ }
+ }
+}
+
+TEST(UrlClassifierFailUpdate, CheckTableReset)
+{
+ const bool FULL_UPDATE = true;
+ const bool PARTIAL_UPDATE = false;
+
+ // Apply V2 update
+ {
+ RefPtr<TableUpdateV2> update = new TableUpdateV2(GTEST_TABLE_V2);
+ mozilla::Unused << update->NewAddChunk(1);
+
+ ApplyUpdate(update);
+
+ // A successful V2 update should create .vlpset & .sbstore files
+ CheckFileExist(GTEST_TABLE_V2, kFilesInV2, true,
+ "V2 update doesn't create vlpset or sbstore");
+ }
+
+ // Helper function to generate table update data
+ auto func = [](RefPtr<TableUpdateV4> update, bool full, const char* str) {
+ update->SetFullUpdate(full);
+ nsCString prefix(str);
+ update->NewPrefixes(prefix.Length(), prefix);
+ };
+
+ // Apply V4 update for table1
+ {
+ RefPtr<TableUpdateV4> update = new TableUpdateV4(GTEST_MALWARE_TABLE_V4);
+ func(update, FULL_UPDATE, "test_prefix");
+
+ ApplyUpdate(update);
+
+ // A successful V4 update should create .vlpset & .metadata files
+ CheckFileExist(GTEST_MALWARE_TABLE_V4, kFilesInV4, true,
+ "v4 update doesn't create vlpset or metadata");
+ }
+
+ // Apply V4 update for table2
+ {
+ RefPtr<TableUpdateV4> update = new TableUpdateV4(GTEST_PHISH_TABLE_V4);
+ func(update, FULL_UPDATE, "test_prefix");
+
+ ApplyUpdate(update);
+
+ CheckFileExist(GTEST_PHISH_TABLE_V4, kFilesInV4, true,
+ "v4 update doesn't create vlpset or metadata");
+ }
+
+  // Apply V4 update with the same prefix as in the previous full update.
+ // This should cause an update error.
+ {
+ RefPtr<TableUpdateV4> update = new TableUpdateV4(GTEST_MALWARE_TABLE_V4);
+ func(update, PARTIAL_UPDATE, "test_prefix");
+
+ ApplyUpdate(update);
+
+ // A fail update should remove files for that table
+ CheckFileExist(GTEST_MALWARE_TABLE_V4, kFilesInV4, false,
+ "a fail v4 update doesn't remove the tables");
+
+ // A fail update should NOT remove files for the other tables
+ CheckFileExist(GTEST_TABLE_V2, kFilesInV2, true,
+ "a fail v4 update removes a v2 table");
+ CheckFileExist(GTEST_PHISH_TABLE_V4, kFilesInV4, true,
+ "a fail v4 update removes the other v4 table");
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp b/toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp
new file mode 100644
index 0000000000..f0f5785017
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestFindFullHash.cpp
@@ -0,0 +1,216 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/Base64.h"
+#include "nsUrlClassifierUtils.h"
+#include "safebrowsing.pb.h"
+
+#include "Common.h"
+
+using namespace mozilla;
+
+template <size_t N>
+static void ToBase64EncodedStringArray(nsCString (&aInput)[N],
+ nsTArray<nsCString>& aEncodedArray) {
+ for (size_t i = 0; i < N; i++) {
+ nsCString encoded;
+ nsresult rv = mozilla::Base64Encode(aInput[i], encoded);
+ NS_ENSURE_SUCCESS_VOID(rv);
+ aEncodedArray.AppendElement(std::move(encoded));
+ }
+}
+
+TEST(UrlClassifierFindFullHash, Request)
+{
+ nsUrlClassifierUtils* urlUtil = nsUrlClassifierUtils::GetInstance();
+
+ nsTArray<nsCString> listNames;
+ listNames.AppendElement("moztest-phish-proto");
+ listNames.AppendElement("moztest-unwanted-proto");
+
+ nsCString listStates[] = {nsCString("sta\x00te1", 7),
+ nsCString("sta\x00te2", 7)};
+ nsTArray<nsCString> listStateArray;
+ ToBase64EncodedStringArray(listStates, listStateArray);
+
+ nsCString prefixes[] = {nsCString("\x00\x00\x00\x01", 4),
+ nsCString("\x00\x00\x00\x00\x01", 5),
+ nsCString("\x00\xFF\x00\x01", 4),
+ nsCString("\x00\xFF\x00\x01\x11\x23\xAA\xBC", 8),
+ nsCString("\x00\x00\x00\x01\x00\x01\x98", 7)};
+ nsTArray<nsCString> prefixArray;
+ ToBase64EncodedStringArray(prefixes, prefixArray);
+
+ nsCString requestBase64;
+ nsresult rv;
+ rv = urlUtil->MakeFindFullHashRequestV4(listNames, listStateArray,
+ prefixArray, requestBase64);
+ ASSERT_NS_SUCCEEDED(rv);
+
+ // Base64 URL decode first.
+ FallibleTArray<uint8_t> requestBinary;
+ rv = Base64URLDecode(requestBase64, Base64URLDecodePaddingPolicy::Require,
+ requestBinary);
+ ASSERT_NS_SUCCEEDED(rv);
+
+ // Parse the FindFullHash binary and compare with the expected values.
+ FindFullHashesRequest r;
+ ASSERT_TRUE(r.ParseFromArray(&requestBinary[0], requestBinary.Length()));
+
+ // Compare client states.
+ ASSERT_EQ(r.client_states_size(), (int)ArrayLength(listStates));
+ for (int i = 0; i < r.client_states_size(); i++) {
+ auto s = r.client_states(i);
+ ASSERT_TRUE(listStates[i].Equals(nsCString(s.c_str(), s.size())));
+ }
+
+ auto threatInfo = r.threat_info();
+
+ // Compare threat types.
+ ASSERT_EQ(threatInfo.threat_types_size(), (int)ArrayLength(listStates));
+ for (int i = 0; i < threatInfo.threat_types_size(); i++) {
+ uint32_t expectedThreatType;
+ rv =
+ urlUtil->ConvertListNameToThreatType(listNames[i], &expectedThreatType);
+ ASSERT_NS_SUCCEEDED(rv);
+ ASSERT_EQ(threatInfo.threat_types(i), (int)expectedThreatType);
+ }
+
+ // Compare prefixes.
+ ASSERT_EQ(threatInfo.threat_entries_size(), (int)ArrayLength(prefixes));
+ for (int i = 0; i < threatInfo.threat_entries_size(); i++) {
+ auto p = threatInfo.threat_entries(i).hash();
+ ASSERT_TRUE(prefixes[i].Equals(nsCString(p.c_str(), p.size())));
+ }
+}
+
+/////////////////////////////////////////////////////////////
+// Following is to test parsing the gethash response.
+
+namespace {
+
+// safebrowsing::Duration manipulation.
+struct MyDuration {
+ uint32_t mSecs;
+ uint32_t mNanos;
+};
+void PopulateDuration(Duration& aDest, const MyDuration& aSrc) {
+ aDest.set_seconds(aSrc.mSecs);
+ aDest.set_nanos(aSrc.mNanos);
+}
+
+// The expected match data.
+static MyDuration EXPECTED_MIN_WAIT_DURATION = {12, 10};
+static MyDuration EXPECTED_NEG_CACHE_DURATION = {120, 9};
+static const struct ExpectedMatch {
+ nsCString mCompleteHash;
+ ThreatType mThreatType;
+ MyDuration mPerHashCacheDuration;
+} EXPECTED_MATCH[] = {
+ {nsCString("01234567890123456789012345678901"),
+ SOCIAL_ENGINEERING_PUBLIC,
+ {8, 500}},
+ {nsCString("12345678901234567890123456789012"),
+ SOCIAL_ENGINEERING_PUBLIC,
+ {7, 100}},
+ {nsCString("23456789012345678901234567890123"),
+ SOCIAL_ENGINEERING_PUBLIC,
+ {1, 20}},
+};
+
+class MyParseCallback final : public nsIUrlClassifierParseFindFullHashCallback {
+ public:
+ NS_DECL_ISUPPORTS
+
+ explicit MyParseCallback(uint32_t& aCallbackCount)
+ : mCallbackCount(aCallbackCount) {}
+
+ NS_IMETHOD
+ OnCompleteHashFound(const nsACString& aCompleteHash,
+ const nsACString& aTableNames,
+ uint32_t aPerHashCacheDuration) override {
+ Verify(aCompleteHash, aTableNames, aPerHashCacheDuration);
+
+ return NS_OK;
+ }
+
+ NS_IMETHOD
+ OnResponseParsed(uint32_t aMinWaitDuration,
+ uint32_t aNegCacheDuration) override {
+ VerifyDuration(aMinWaitDuration / 1000, EXPECTED_MIN_WAIT_DURATION);
+ VerifyDuration(aNegCacheDuration, EXPECTED_NEG_CACHE_DURATION);
+
+ return NS_OK;
+ }
+
+ private:
+ void Verify(const nsACString& aCompleteHash, const nsACString& aTableNames,
+ uint32_t aPerHashCacheDuration) {
+ auto expected = EXPECTED_MATCH[mCallbackCount];
+
+ ASSERT_TRUE(aCompleteHash.Equals(expected.mCompleteHash));
+
+ // Verify aTableNames
+ nsUrlClassifierUtils* urlUtil = nsUrlClassifierUtils::GetInstance();
+
+ nsCString tableNames;
+ nsresult rv =
+ urlUtil->ConvertThreatTypeToListNames(expected.mThreatType, tableNames);
+ ASSERT_NS_SUCCEEDED(rv);
+ ASSERT_TRUE(aTableNames.Equals(tableNames));
+
+ VerifyDuration(aPerHashCacheDuration, expected.mPerHashCacheDuration);
+
+ mCallbackCount++;
+ }
+
+ void VerifyDuration(uint32_t aToVerify, const MyDuration& aExpected) {
+ ASSERT_TRUE(aToVerify == aExpected.mSecs);
+ }
+
+ ~MyParseCallback() = default;
+
+ uint32_t& mCallbackCount;
+};
+
+NS_IMPL_ISUPPORTS(MyParseCallback, nsIUrlClassifierParseFindFullHashCallback)
+
+} // end of unnamed namespace.
+
+TEST(UrlClassifierFindFullHash, ParseRequest)
+{
+ // Build response.
+ FindFullHashesResponse r;
+
+ // Init response-wise durations.
+ auto minWaitDuration = r.mutable_minimum_wait_duration();
+ PopulateDuration(*minWaitDuration, EXPECTED_MIN_WAIT_DURATION);
+ auto negCacheDuration = r.mutable_negative_cache_duration();
+ PopulateDuration(*negCacheDuration, EXPECTED_NEG_CACHE_DURATION);
+
+ // Init matches.
+ for (uint32_t i = 0; i < ArrayLength(EXPECTED_MATCH); i++) {
+ auto expected = EXPECTED_MATCH[i];
+ auto match = r.mutable_matches()->Add();
+ match->set_threat_type(expected.mThreatType);
+ match->mutable_threat()->set_hash(expected.mCompleteHash.BeginReading(),
+ expected.mCompleteHash.Length());
+ auto perHashCacheDuration = match->mutable_cache_duration();
+ PopulateDuration(*perHashCacheDuration, expected.mPerHashCacheDuration);
+ }
+ std::string s;
+ r.SerializeToString(&s);
+
+ uint32_t callbackCount = 0;
+ nsCOMPtr<nsIUrlClassifierParseFindFullHashCallback> callback =
+ new MyParseCallback(callbackCount);
+
+ nsUrlClassifierUtils* urlUtil = nsUrlClassifierUtils::GetInstance();
+ nsresult rv = urlUtil->ParseFindFullHashResponseV4(
+ nsCString(s.c_str(), s.size()), callback);
+ NS_ENSURE_SUCCESS_VOID(rv);
+
+ ASSERT_EQ(callbackCount, ArrayLength(EXPECTED_MATCH));
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
new file mode 100644
index 0000000000..e1482c1416
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestLookupCacheV4.cpp
@@ -0,0 +1,152 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Classifier.h"
+#include "LookupCacheV4.h"
+#include "nsAppDirectoryServiceDefs.h"
+
+#include "Common.h"
+
+#define GTEST_SAFEBROWSING_DIR "safebrowsing"_ns
+
+static void TestHasPrefix(const nsCString& aURL, bool aExpectedHas,
+ bool aExpectedComplete) {
+ _PrefixArray array = {CreatePrefixFromURL("bravo.com/", 32),
+ CreatePrefixFromURL("browsing.com/", 8),
+ CreatePrefixFromURL("gound.com/", 5),
+ CreatePrefixFromURL("small.com/", 4)};
+
+ nsCOMPtr<nsIFile> file;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ file->AppendNative(GTEST_SAFEBROWSING_DIR);
+
+ RunTestInNewThread([&]() -> void {
+ RefPtr<LookupCache> cache = SetupLookupCache<LookupCacheV4>(array, file);
+
+ Completion lookupHash;
+ lookupHash.FromPlaintext(aURL);
+
+ bool has, confirmed;
+ uint32_t matchLength;
+ // Freshness is not used in V4 so we just put dummy values here.
+ TableFreshnessMap dummy;
+ nsresult rv = cache->Has(lookupHash, &has, &matchLength, &confirmed);
+
+ EXPECT_EQ(rv, NS_OK);
+ EXPECT_EQ(has, aExpectedHas);
+ EXPECT_EQ(matchLength == COMPLETE_SIZE, aExpectedComplete);
+ EXPECT_EQ(confirmed, false);
+
+ cache->ClearAll();
+ });
+}
+
+TEST(UrlClassifierLookupCacheV4, HasComplete)
+{ TestHasPrefix("bravo.com/"_ns, true, true); }
+
+TEST(UrlClassifierLookupCacheV4, HasPrefix)
+{ TestHasPrefix("browsing.com/"_ns, true, false); }
+
+TEST(UrlClassifierLookupCacheV4, Nomatch)
+{ TestHasPrefix("nomatch.com/"_ns, false, false); }
+
+// Test an existing .pset should be removed after .vlpset is written
+TEST(UrlClassifierLookupCacheV4, RemoveOldPset)
+{
+ nsCOMPtr<nsIFile> oldPsetFile;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+ getter_AddRefs(oldPsetFile));
+ oldPsetFile->AppendNative("safebrowsing"_ns);
+ oldPsetFile->AppendNative(GTEST_TABLE_V4 + ".pset"_ns);
+
+ nsCOMPtr<nsIFile> newPsetFile;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+ getter_AddRefs(newPsetFile));
+ newPsetFile->AppendNative("safebrowsing"_ns);
+ newPsetFile->AppendNative(GTEST_TABLE_V4 + ".vlpset"_ns);
+
+ // Create the legacy .pset file
+ nsresult rv = oldPsetFile->Create(nsIFile::NORMAL_FILE_TYPE, 0666);
+ EXPECT_EQ(rv, NS_OK);
+
+ bool exists;
+ rv = oldPsetFile->Exists(&exists);
+ EXPECT_EQ(rv, NS_OK);
+ EXPECT_EQ(exists, true);
+
+ // Setup the data in lookup cache and write its data to disk
+ RefPtr<Classifier> classifier = GetClassifier();
+ _PrefixArray array = {CreatePrefixFromURL("entry.com/", 4)};
+ rv = BuildLookupCache(classifier, GTEST_TABLE_V4, array);
+ EXPECT_EQ(rv, NS_OK);
+
+ RefPtr<LookupCache> cache = classifier->GetLookupCache(GTEST_TABLE_V4, false);
+ rv = cache->WriteFile();
+ EXPECT_EQ(rv, NS_OK);
+
+ // .vlpset should exist while .pset should be removed
+ rv = newPsetFile->Exists(&exists);
+ EXPECT_EQ(rv, NS_OK);
+ EXPECT_EQ(exists, true);
+
+ rv = oldPsetFile->Exists(&exists);
+ EXPECT_EQ(rv, NS_OK);
+ EXPECT_EQ(exists, false);
+
+ newPsetFile->Remove(false);
+}
+
+// Test the legacy load
+TEST(UrlClassifierLookupCacheV4, LoadOldPset)
+{
+ nsCOMPtr<nsIFile> oldPsetFile;
+
+ _PrefixArray array = {CreatePrefixFromURL("entry.com/", 4)};
+ PrefixStringMap map;
+ PrefixArrayToPrefixStringMap(array, map);
+
+ // Prepare .pset file on disk
+ {
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR,
+ getter_AddRefs(oldPsetFile));
+ oldPsetFile->AppendNative("safebrowsing"_ns);
+ oldPsetFile->AppendNative(GTEST_TABLE_V4 + ".pset"_ns);
+
+ RefPtr<VariableLengthPrefixSet> pset = new VariableLengthPrefixSet;
+ pset->SetPrefixes(map);
+
+ nsCOMPtr<nsIOutputStream> stream;
+ nsresult rv =
+ NS_NewLocalFileOutputStream(getter_AddRefs(stream), oldPsetFile);
+ EXPECT_EQ(rv, NS_OK);
+
+ rv = pset->WritePrefixes(stream);
+ EXPECT_EQ(rv, NS_OK);
+ }
+
+ // Load data from disk
+ RefPtr<Classifier> classifier = GetClassifier();
+ RefPtr<LookupCache> cache = classifier->GetLookupCache(GTEST_TABLE_V4, false);
+
+ RefPtr<LookupCacheV4> cacheV4 = LookupCache::Cast<LookupCacheV4>(cache);
+ CheckContent(cacheV4, array);
+
+ oldPsetFile->Remove(false);
+}
+
+TEST(UrlClassifierLookupCacheV4, BuildAPI)
+{
+ _PrefixArray init = {_Prefix("alph")};
+ RefPtr<LookupCacheV4> cache = SetupLookupCache<LookupCacheV4>(init);
+
+ _PrefixArray update = {_Prefix("beta")};
+ PrefixStringMap map;
+ PrefixArrayToPrefixStringMap(update, map);
+
+ cache->Build(map);
+ EXPECT_TRUE(map.IsEmpty());
+
+ CheckContent(cache, update);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp b/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp
new file mode 100644
index 0000000000..a79ab643f4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestPerProviderDirectory.cpp
@@ -0,0 +1,127 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "HashStore.h"
+#include "LookupCache.h"
+#include "LookupCacheV4.h"
+#include "nsAppDirectoryServiceDefs.h"
+
+#include "Common.h"
+
+namespace mozilla {
+namespace safebrowsing {
+
+class PerProviderDirectoryTestUtils {
+ public:
+ template <typename T>
+ static nsIFile* InspectStoreDirectory(const T& aT) {
+ return aT.mStoreDirectory;
+ }
+};
+
+} // end of namespace safebrowsing
+} // end of namespace mozilla
+
+template <typename T>
+static void VerifyPrivateStorePath(T* target, const nsCString& aTableName,
+ const nsCString& aProvider,
+ const nsCOMPtr<nsIFile>& aRootDir,
+ bool aUsePerProviderStore) {
+ nsString rootStorePath;
+ nsresult rv = aRootDir->GetPath(rootStorePath);
+ EXPECT_EQ(rv, NS_OK);
+
+ nsIFile* privateStoreDirectory =
+ PerProviderDirectoryTestUtils::InspectStoreDirectory(*target);
+
+ nsString privateStorePath;
+ rv = privateStoreDirectory->GetPath(privateStorePath);
+ ASSERT_EQ(rv, NS_OK);
+
+ nsString expectedPrivateStorePath = rootStorePath;
+
+ if (aUsePerProviderStore) {
+    // Use API to append "provider" to the root directory path
+ nsCOMPtr<nsIFile> expectedPrivateStoreDir;
+ rv = aRootDir->Clone(getter_AddRefs(expectedPrivateStoreDir));
+ ASSERT_EQ(rv, NS_OK);
+
+ expectedPrivateStoreDir->AppendNative(aProvider);
+ rv = expectedPrivateStoreDir->GetPath(expectedPrivateStorePath);
+ ASSERT_EQ(rv, NS_OK);
+ }
+
+ printf("table: %s\nprovider: %s\nroot path: %s\nprivate path: %s\n\n",
+ aTableName.get(), aProvider.get(),
+ NS_ConvertUTF16toUTF8(rootStorePath).get(),
+ NS_ConvertUTF16toUTF8(privateStorePath).get());
+
+ ASSERT_TRUE(privateStorePath == expectedPrivateStorePath);
+}
+
+TEST(UrlClassifierPerProviderDirectory, LookupCache)
+{
+ nsCOMPtr<nsIFile> rootDir;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
+
+ RunTestInNewThread([&]() -> void {
+ // For V2 tables (NOT ending with '-proto'), root directory should be
+ // used as the private store.
+ {
+ nsAutoCString table("goog-phish-shavar");
+ nsAutoCString provider("google");
+ RefPtr<LookupCacheV2> lc = new LookupCacheV2(table, provider, rootDir);
+ VerifyPrivateStorePath<LookupCacheV2>(lc, table, provider, rootDir,
+ false);
+ }
+
+ // For V4 tables, if provider is found, use per-provider subdirectory;
+ // If not found, use root directory.
+ {
+ nsAutoCString table("goog-noprovider-proto");
+ nsAutoCString provider("");
+ RefPtr<LookupCacheV4> lc = new LookupCacheV4(table, provider, rootDir);
+ VerifyPrivateStorePath<LookupCacheV4>(lc, table, provider, rootDir,
+ false);
+ }
+ {
+ nsAutoCString table("goog-phish-proto");
+ nsAutoCString provider("google4");
+ RefPtr<LookupCacheV4> lc = new LookupCacheV4(table, provider, rootDir);
+ VerifyPrivateStorePath<LookupCacheV4>(lc, table, provider, rootDir, true);
+ }
+ });
+}
+
+TEST(UrlClassifierPerProviderDirectory, HashStore)
+{
+ nsCOMPtr<nsIFile> rootDir;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
+
+ RunTestInNewThread([&]() -> void {
+ // For V2 tables (NOT ending with '-proto'), root directory should be
+ // used as the private store.
+ {
+ nsAutoCString table("goog-phish-shavar");
+ nsAutoCString provider("google");
+ HashStore hs(table, provider, rootDir);
+ VerifyPrivateStorePath(&hs, table, provider, rootDir, false);
+ }
+ // For V4 tables, if provider is found, use per-provider subdirectory;
+ // If not found, use root directory.
+ {
+ nsAutoCString table("goog-noprovider-proto");
+ nsAutoCString provider("");
+ HashStore hs(table, provider, rootDir);
+ VerifyPrivateStorePath(&hs, table, provider, rootDir, false);
+ }
+ {
+ nsAutoCString table("goog-phish-proto");
+ nsAutoCString provider("google4");
+ HashStore hs(table, provider, rootDir);
+ VerifyPrivateStorePath(&hs, table, provider, rootDir, true);
+ }
+ });
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestPrefixSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestPrefixSet.cpp
new file mode 100644
index 0000000000..6d83cd02f6
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestPrefixSet.cpp
@@ -0,0 +1,87 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/Preferences.h"
+#include "nsString.h"
+#include "nsUrlClassifierPrefixSet.h"
+
+#include "Common.h"
+
+// This function generates N 4-byte prefixes.
+static void RandomPrefixes(uint32_t N, nsTArray<uint32_t>& array) {
+ array.Clear();
+ array.SetCapacity(N);
+
+ for (uint32_t i = 0; i < N; i++) {
+ bool added = false;
+
+ while (!added) {
+ nsAutoCString prefix;
+ char* dst = prefix.BeginWriting();
+ for (uint32_t j = 0; j < 4; j++) {
+ dst[j] = static_cast<char>(rand() % 256);
+ }
+
+ const char* src = prefix.BeginReading();
+ uint32_t data = 0;
+ memcpy(&data, src, sizeof(data));
+
+ if (!array.Contains(data)) {
+ array.AppendElement(data);
+ added = true;
+ }
+ }
+ }
+
+ struct Comparator {
+ bool LessThan(const uint32_t& aA, const uint32_t& aB) const {
+ return aA < aB;
+ }
+
+ bool Equals(const uint32_t& aA, const uint32_t& aB) const {
+ return aA == aB;
+ }
+ };
+
+ array.Sort(Comparator());
+}
+
+void RunTest(uint32_t aTestSize) {
+ RefPtr<nsUrlClassifierPrefixSet> prefixSet = new nsUrlClassifierPrefixSet();
+ nsTArray<uint32_t> array;
+
+ RandomPrefixes(aTestSize, array);
+
+ nsresult rv = prefixSet->SetPrefixes(array.Elements(), array.Length());
+ ASSERT_NS_SUCCEEDED(rv);
+
+ for (uint32_t i = 0; i < array.Length(); i++) {
+ uint32_t value = 0;
+ rv = prefixSet->GetPrefixByIndex(i, &value);
+ ASSERT_NS_SUCCEEDED(rv);
+
+ ASSERT_TRUE(value == array[i]);
+ }
+}
+
+TEST(URLClassifierPrefixSet, GetTargetPrefixWithLargeSet)
+{
+ // Make sure the delta algorithm will be used.
+ static const char prefKey[] = "browser.safebrowsing.prefixset_max_array_size";
+ mozilla::Preferences::SetUint(prefKey, 10000);
+
+ // Ideally, we should test more than 512 * 1024 entries. But, it will make the
+ // test too long. So, we test 100k entries instead.
+ RunTest(100000);
+}
+
+TEST(URLClassifierPrefixSet, GetTargetPrefixWithSmallSet)
+{
+ // Make sure the delta algorithm won't be used.
+ static const char prefKey[] = "browser.safebrowsing.prefixset_max_array_size";
+ mozilla::Preferences::SetUint(prefKey, 10000);
+
+ RunTest(1000);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp b/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp
new file mode 100644
index 0000000000..2155140b4d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestProtocolParser.cpp
@@ -0,0 +1,140 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/EndianUtils.h"
+#include "ProtocolParser.h"
+
+#include "Common.h"
+
+typedef FetchThreatListUpdatesResponse_ListUpdateResponse ListUpdateResponse;
+
+static bool InitUpdateResponse(ListUpdateResponse* aUpdateResponse,
+ ThreatType aThreatType, const nsACString& aState,
+ const nsACString& aChecksum, bool isFullUpdate,
+ const nsTArray<uint32_t>& aFixedLengthPrefixes,
+ bool aDoPrefixEncoding) {
+ aUpdateResponse->set_threat_type(aThreatType);
+ aUpdateResponse->set_new_client_state(aState.BeginReading(), aState.Length());
+ aUpdateResponse->mutable_checksum()->set_sha256(aChecksum.BeginReading(),
+ aChecksum.Length());
+ aUpdateResponse->set_response_type(isFullUpdate
+ ? ListUpdateResponse::FULL_UPDATE
+ : ListUpdateResponse::PARTIAL_UPDATE);
+
+ auto additions = aUpdateResponse->mutable_additions()->Add();
+
+ if (!aDoPrefixEncoding) {
+ additions->set_compression_type(RAW);
+ auto rawHashes = additions->mutable_raw_hashes();
+ rawHashes->set_prefix_size(4);
+ auto prefixes = rawHashes->mutable_raw_hashes();
+ for (auto p : aFixedLengthPrefixes) {
+ char buffer[4];
+ mozilla::NativeEndian::copyAndSwapToBigEndian(buffer, &p, 1);
+ prefixes->append(buffer, 4);
+ }
+ return true;
+ }
+
+ if (1 != aFixedLengthPrefixes.Length()) {
+ printf("This function only supports single value encoding.\n");
+ return false;
+ }
+
+ uint32_t firstValue = aFixedLengthPrefixes[0];
+ additions->set_compression_type(RICE);
+ auto riceHashes = additions->mutable_rice_hashes();
+ riceHashes->set_first_value(firstValue);
+ riceHashes->set_num_entries(0);
+
+ return true;
+}
+
+static void DumpBinary(const nsACString& aBinary) {
+ nsCString s;
+ for (size_t i = 0; i < aBinary.Length(); i++) {
+ s.AppendPrintf("\\x%.2X", (uint8_t)aBinary[i]);
+ }
+ printf("%s\n", s.get());
+}
+
+TEST(UrlClassifierProtocolParser, UpdateWait)
+{
+ // Top level response which contains a list of update response
+ // for different lists.
+ FetchThreatListUpdatesResponse response;
+
+ auto r = response.mutable_list_update_responses()->Add();
+ InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC, nsCString("sta\x00te", 6),
+ nsCString("check\x0sum", 9), true, {0, 1, 2, 3},
+ false /* aDoPrefixEncoding */);
+
+ // Set min wait duration.
+ auto minWaitDuration = response.mutable_minimum_wait_duration();
+ minWaitDuration->set_seconds(8);
+ minWaitDuration->set_nanos(1 * 1000000000);
+
+ std::string s;
+ response.SerializeToString(&s);
+
+ DumpBinary(nsCString(s.c_str(), s.length()));
+
+ ProtocolParser* p = new ProtocolParserProtobuf();
+ p->AppendStream(nsCString(s.c_str(), s.length()));
+ p->End();
+ ASSERT_EQ(p->UpdateWaitSec(), 9u);
+ delete p;
+}
+
+TEST(UrlClassifierProtocolParser, SingleValueEncoding)
+{
+  // Top level response which contains a list of update responses
+ // for different lists.
+ FetchThreatListUpdatesResponse response;
+
+ auto r = response.mutable_list_update_responses()->Add();
+
+ const char* expectedPrefix = "\x00\x01\x02\x00";
+ if (!InitUpdateResponse(r, SOCIAL_ENGINEERING_PUBLIC,
+ nsCString("sta\x00te", 6),
+ nsCString("check\x0sum", 9), true,
+ // As per spec, we should interpret the prefix as
+ // uint32 in little endian before encoding.
+ {mozilla::LittleEndian::readUint32(expectedPrefix)},
+ true /* aDoPrefixEncoding */)) {
+ printf("Failed to initialize update response.");
+ ASSERT_TRUE(false);
+ return;
+ }
+
+ // Set min wait duration.
+ auto minWaitDuration = response.mutable_minimum_wait_duration();
+ minWaitDuration->set_seconds(8);
+ minWaitDuration->set_nanos(1 * 1000000000);
+
+ std::string s;
+ response.SerializeToString(&s);
+
+ // Feed data to the protocol parser.
+ ProtocolParser* p = new ProtocolParserProtobuf();
+ p->SetRequestedTables({nsCString("googpub-phish-proto")});
+ p->AppendStream(nsCString(s.c_str(), s.length()));
+ p->End();
+
+ const TableUpdateArray& tus = p->GetTableUpdates();
+ RefPtr<const TableUpdateV4> tuv4 = TableUpdate::Cast<TableUpdateV4>(tus[0]);
+ auto& prefixMap = tuv4->Prefixes();
+ for (const auto& entry : prefixMap) {
+    // This prefix map should contain only a single 4-byte prefix.
+ ASSERT_EQ(entry.GetKey(), 4u);
+
+ // The fixed-length prefix string from ProtocolParser should
+ // exactly match the expected prefix string.
+ nsCString* prefix = entry.GetWeak();
+ ASSERT_TRUE(prefix->Equals(nsCString(expectedPrefix, 4)));
+ }
+
+ delete p;
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp b/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp
new file mode 100644
index 0000000000..9130cfe385
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestRiceDeltaDecoder.cpp
@@ -0,0 +1,173 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/ArrayUtils.h"
+#include "RiceDeltaDecoder.h"
+
+#include "Common.h"
+
+namespace {
+
+struct TestingData {
+ std::vector<uint32_t> mExpectedDecoded;
+ std::vector<uint8_t> mEncoded;
+ uint32_t mRiceParameter;
+};
+
+} // namespace
+
+static bool runOneTest(TestingData& aData) {
+ RiceDeltaDecoder decoder(&aData.mEncoded[0], aData.mEncoded.size());
+
+ std::vector<uint32_t> decoded(aData.mExpectedDecoded.size());
+
+ uint32_t firstValue = aData.mExpectedDecoded[0];
+ bool rv = decoder.Decode(
+ aData.mRiceParameter, firstValue,
+ decoded.size() - 1, // # of entries (first value not included).
+ &decoded[0]);
+
+ return rv && decoded == aData.mExpectedDecoded;
+}
+
+TEST(UrlClassifierRiceDeltaDecoder, SingleEncodedValue)
+{
+ TestingData td = {{99}, {99}, 0};
+
+ ASSERT_TRUE(runOneTest(td));
+}
+
+// In this batch of tests, the encoded data would be like
+// what we originally receive from the network. See comment
+// in |runOneTest| for more detail.
+TEST(UrlClassifierRiceDeltaDecoder, Empty)
+{
+ // The following structure and testing data is copied from Chromium source
+ // code:
+ //
+ // https://chromium.googlesource.com/chromium/src.git/+/950f9975599768b6a08c7146cb4befa161be87aa/components/safe_browsing_db/v4_rice_unittest.cc#75
+ //
+ // and will be translated to our own testing format.
+
+ struct RiceDecodingTestInfo {
+ uint32_t mRiceParameter;
+ std::vector<uint32_t> mDeltas;
+ std::string mEncoded;
+
+ RiceDecodingTestInfo(uint32_t aRiceParameter,
+ const std::vector<uint32_t>& aDeltas,
+ const std::string& aEncoded)
+ : mRiceParameter(aRiceParameter),
+ mDeltas(aDeltas),
+ mEncoded(aEncoded) {}
+ };
+
+ // Copyright 2016 The Chromium Authors. All rights reserved.
+ // Use of this source code is governed by a BSD-style license that can be
+ // found in the media/webrtc/trunk/webrtc/LICENSE.
+
+ // ----- Start of Chromium test code ----
+ const std::vector<RiceDecodingTestInfo> TESTING_DATA_CHROMIUM = {
+ RiceDecodingTestInfo(2, {15, 9}, "\xf7\x2"),
+ RiceDecodingTestInfo(
+ 28, {1777762129, 2093280223, 924369848},
+ "\xbf\xa8\x3f\xfb\xfc\xfb\x5e\x27\xe6\xc3\x1d\xc6\x38"),
+ RiceDecodingTestInfo(
+ 28, {62763050, 1046523781, 192522171, 1800511020, 4442775, 582142548},
+ "\x54\x60\x7b\xe7\x0a\x5f\xc1\xdc\xee\x69\xde"
+ "\xfe\x58\x3c\xa3\xd6\xa5\xf2\x10\x8c\x4a\x59"
+ "\x56\x00"),
+ RiceDecodingTestInfo(
+ 28,
+ {26067715, 344823336, 8420095, 399843890, 95029378, 731622412,
+ 35811335, 1047558127, 1117722715, 78698892},
+ "\x06\x86\x1b\x23\x14\xcb\x46\xf2\xaf\x07\x08\xc9\x88\x54\x1f\x41\x04"
+ "\xd5\x1a\x03\xeb\xe6\x3a\x80\x13\x91\x7b\xbf\x83\xf3\xb7\x85\xf1\x29"
+ "\x18\xb3\x61\x09"),
+ RiceDecodingTestInfo(
+ 27,
+ {225846818, 328287420, 166748623, 29117720, 552397365, 350353215,
+ 558267528, 4738273, 567093445, 28563065, 55077698, 73091685,
+ 339246010, 98242620, 38060941, 63917830, 206319759, 137700744},
+ "\x89\x98\xd8\x75\xbc\x44\x91\xeb\x39\x0c\x3e\x30\x9a\x78\xf3\x6a\xd4"
+ "\xd9\xb1\x9f\xfb\x70\x3e\x44\x3e\xa3\x08\x67\x42\xc2\x2b\x46\x69\x8e"
+ "\x3c\xeb\xd9\x10\x5a\x43\x9a\x32\xa5\x2d\x4e\x77\x0f\x87\x78\x20\xb6"
+ "\xab\x71\x98\x48\x0c\x9e\x9e\xd7\x23\x0c\x13\x43\x2c\xa9\x01"),
+ RiceDecodingTestInfo(
+ 28,
+ {339784008, 263128563, 63871877, 69723256, 826001074, 797300228,
+ 671166008, 207712688},
+ std::string("\x21\xc5\x02\x91\xf9\x82\xd7\x57\xb8\xe9\x3c\xf0\xc8\x4f"
+ "\xe8\x64\x8d\x77\x62\x04\xd6\x85\x3f\x1c\x97\x00\x04\x1b"
+ "\x17\xc6",
+ 30)),
+ RiceDecodingTestInfo(
+ 28,
+ {471820069, 196333855, 855579133, 122737976, 203433838, 85354544,
+ 1307949392, 165938578, 195134475, 553930435, 49231136},
+ "\x95\x9c\x7d\xb0\x8f\xe8\xd9\xbd\xfe\x8c\x7f\x81\x53\x0d\x75\xdc\x4e"
+ "\x40\x18\x0c\x9a\x45\x3d\xa8\xdc\xfa\x26\x59\x40\x9e\x16\x08\x43\x77"
+ "\xc3\x4e\x04\x01\xa4\xe6\x5d\x00"),
+ RiceDecodingTestInfo(
+ 27,
+ {87336845, 129291033, 30906211, 433549264, 30899891, 53207875,
+ 11959529, 354827862, 82919275, 489637251, 53561020, 336722992,
+ 408117728, 204506246, 188216092, 9047110, 479817359, 230317256},
+ "\x1a\x4f\x69\x2a\x63\x9a\xf6\xc6\x2e\xaf\x73\xd0\x6f\xd7\x31\xeb\x77"
+ "\x1d\x43\xe3\x2b\x93\xce\x67\x8b\x59\xf9\x98\xd4\xda\x4f\x3c\x6f\xb0"
+ "\xe8\xa5\x78\x8d\x62\x36\x18\xfe\x08\x1e\x78\xd8\x14\x32\x24\x84\x61"
+ "\x1c\xf3\x37\x63\xc4\xa0\x88\x7b\x74\xcb\x64\xc8\x5c\xba\x05"),
+ RiceDecodingTestInfo(
+ 28,
+ {297968956, 19709657, 259702329, 76998112, 1023176123, 29296013,
+ 1602741145, 393745181, 177326295, 55225536, 75194472},
+ "\xf1\x94\x0a\x87\x6c\x5f\x96\x90\xe3\xab\xf7\xc0\xcb\x2d\xe9\x76\xdb"
+ "\xf8\x59\x63\xc1\x6f\x7c\x99\xe3\x87\x5f\xc7\x04\xde\xb9\x46\x8e\x54"
+ "\xc0\xac\x4a\x03\x0d\x6c\x8f\x00"),
+ RiceDecodingTestInfo(
+ 28,
+ {532220688, 780594691, 436816483, 163436269, 573044456, 1069604,
+ 39629436, 211410997, 227714491, 381562898, 75610008, 196754597,
+ 40310339, 15204118, 99010842},
+ "\x41\x2c\xe4\xfe\x06\xdc\x0d\xbd\x31\xa5\x04\xd5\x6e\xdd\x9b\x43\xb7"
+ "\x3f\x11\x24\x52\x10\x80\x4f\x96\x4b\xd4\x80\x67\xb2\xdd\x52\xc9\x4e"
+ "\x02\xc6\xd7\x60\xde\x06\x92\x52\x1e\xdd\x35\x64\x71\x26\x2c\xfe\xcf"
+ "\x81\x46\xb2\x79\x01"),
+ RiceDecodingTestInfo(
+ 28,
+ {219354713, 389598618, 750263679, 554684211, 87381124, 4523497,
+ 287633354, 801308671, 424169435, 372520475, 277287849},
+ "\xb2\x2c\x26\x3a\xcd\x66\x9c\xdb\x5f\x07\x2e\x6f\xe6\xf9\x21\x10\x52"
+ "\xd5\x94\xf4\x82\x22\x48\xf9\x9d\x24\xf6\xff\x2f\xfc\x6d\x3f\x21\x65"
+ "\x1b\x36\x34\x56\xea\xc4\x21\x00"),
+ };
+
+ // ----- End of Chromium test code ----
+
+ for (auto tdc : TESTING_DATA_CHROMIUM) {
+ // Populate chromium testing data to our native testing data struct.
+ TestingData d;
+
+ d.mRiceParameter = tdc.mRiceParameter; // Populate rice parameter.
+
+ // Populate encoded data from std::string to vector<uint8>.
+ d.mEncoded.resize(tdc.mEncoded.size());
+ memcpy(&d.mEncoded[0], tdc.mEncoded.c_str(), tdc.mEncoded.size());
+
+ // Populate deltas to expected decoded data. The first value would be just
+    // set to an arbitrary value, say 7, to avoid any assumption about the
+ // first value in the implementation.
+ d.mExpectedDecoded.resize(tdc.mDeltas.size() + 1);
+ for (size_t i = 0; i < d.mExpectedDecoded.size(); i++) {
+ if (0 == i) {
+ d.mExpectedDecoded[i] = 7; // "7" is an arbitrary starting value
+ } else {
+ d.mExpectedDecoded[i] = d.mExpectedDecoded[i - 1] + tdc.mDeltas[i - 1];
+ }
+ }
+
+ ASSERT_TRUE(runOneTest(d));
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp
new file mode 100644
index 0000000000..aa15344324
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestSafeBrowsingProtobuf.cpp
@@ -0,0 +1,30 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "safebrowsing.pb.h"
+
+#include "Common.h"
+
+// Round-trip a minimal FetchThreatListUpdatesRequest through protobuf
+// serialization and verify the client id survives.
+TEST(UrlClassifierProtobuf, Empty)
+{
+  using namespace mozilla::safebrowsing;
+
+  const std::string CLIENT_ID = "firefox";
+
+  // Construct a simple update request.
+  FetchThreatListUpdatesRequest r;
+  r.set_allocated_client(new ClientInfo());
+  r.mutable_client()->set_client_id(CLIENT_ID);
+
+  // Then serialize. SerializeToString reports failure via its return value;
+  // previously this was ignored, so a failed round-trip could leave |s|
+  // empty and still reach the final check.
+  std::string s;
+  ASSERT_TRUE(r.SerializeToString(&s));
+
+  // De-serialize; ParseFromString likewise returns false on malformed input.
+  FetchThreatListUpdatesRequest r2;
+  ASSERT_TRUE(r2.ParseFromString(s));
+
+  ASSERT_EQ(r2.client().client_id(), CLIENT_ID);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp b/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp
new file mode 100644
index 0000000000..2fc4c793f7
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestSafebrowsingHash.cpp
@@ -0,0 +1,58 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/EndianUtils.h"
+
+#include "Common.h"
+
+// Round-trip a 4-byte prefix through ToUint32()/FromUint32() and verify the
+// raw bytes are preserved unchanged in both directions.
+TEST(UrlClassifierHash, ToFromUint32)
+{
+  using namespace mozilla::safebrowsing;
+
+  const char PREFIX_RAW[4] = {0x1, 0x2, 0x3, 0x4};
+  uint32_t PREFIX_UINT32;
+  memcpy(&PREFIX_UINT32, PREFIX_RAW, 4);
+
+  Prefix p;
+  p.Assign(nsCString(PREFIX_RAW, 4));
+  ASSERT_EQ(p.ToUint32(), PREFIX_UINT32);
+
+  p.FromUint32(PREFIX_UINT32);
+  ASSERT_EQ(memcmp(PREFIX_RAW, p.buf, 4), 0);
+}
+
+// Verify Prefix comparison is endianness-independent: the raw bytes are
+// chosen per-endianness so the logical ordering "p1 == p3 < p2" must hold
+// on both little- and big-endian machines.
+TEST(UrlClassifierHash, Compare)
+{
+  using namespace mozilla;
+  using namespace mozilla::safebrowsing;
+
+  Prefix p1, p2, p3;
+
+  // The order of p1,p2,p3 is "p1 == p3 < p2"
+#if MOZ_LITTLE_ENDIAN()
+  p1.Assign(nsCString("\x01\x00\x00\x00", 4));
+  p2.Assign(nsCString("\x00\x00\x00\x01", 4));
+  p3.Assign(nsCString("\x01\x00\x00\x00", 4));
+#else
+  p1.Assign(nsCString("\x00\x00\x00\x01", 4));
+  p2.Assign(nsCString("\x01\x00\x00\x00", 4));
+  p3.Assign(nsCString("\x00\x00\x00\x01", 4));
+#endif
+
+  // Make sure "p1 == p3 < p2" is true
+  // on both little and big endian machine.
+
+  ASSERT_EQ(p1.Compare(p2), -1);
+  ASSERT_EQ(p1.Compare(p1), 0);
+  ASSERT_EQ(p2.Compare(p1), 1);
+  ASSERT_EQ(p1.Compare(p3), 0);
+
+  ASSERT_TRUE(p1 < p2);
+  ASSERT_TRUE(p1 == p1);
+  ASSERT_TRUE(p1 == p3);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestTable.cpp b/toolkit/components/url-classifier/tests/gtest/TestTable.cpp
new file mode 100644
index 0000000000..796f40b265
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestTable.cpp
@@ -0,0 +1,59 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "nsUrlClassifierDBService.h"
+
+#include "Common.h"
+
+// Assert that TablesToResponse maps the (possibly comma-separated) table
+// list |table| to the nsresult |result|.
+static void TestResponseCode(const char* table, nsresult result) {
+  nsCString tableName(table);
+  ASSERT_EQ(TablesToResponse(tableName), result);
+}
+
+// Exercise the table-name -> error-code mapping, including lists that mix
+// several table categories (the most severe category wins).
+TEST(UrlClassifierTable, ResponseCode)
+{
+  // malware URIs.
+  TestResponseCode("goog-malware-shavar", NS_ERROR_MALWARE_URI);
+  TestResponseCode("test-malware-simple", NS_ERROR_MALWARE_URI);
+  TestResponseCode("goog-phish-shavar,test-malware-simple",
+                   NS_ERROR_MALWARE_URI);
+  TestResponseCode(
+      "test-malware-simple,mozstd-track-digest256,mozplugin-block-digest256",
+      NS_ERROR_MALWARE_URI);
+
+  // phish URIs.
+  TestResponseCode("goog-phish-shavar", NS_ERROR_PHISHING_URI);
+  TestResponseCode("test-phish-simple", NS_ERROR_PHISHING_URI);
+  TestResponseCode("test-phish-simple,mozplugin-block-digest256",
+                   NS_ERROR_PHISHING_URI);
+  TestResponseCode(
+      "mozstd-track-digest256,test-phish-simple,goog-unwanted-shavar",
+      NS_ERROR_PHISHING_URI);
+
+  // unwanted URIs.
+  TestResponseCode("goog-unwanted-shavar", NS_ERROR_UNWANTED_URI);
+  TestResponseCode("test-unwanted-simple", NS_ERROR_UNWANTED_URI);
+  TestResponseCode("mozplugin-unwanted-digest256,mozfull-track-digest256",
+                   NS_ERROR_UNWANTED_URI);
+  TestResponseCode(
+      "test-block-simple,mozfull-track-digest256,test-unwanted-simple",
+      NS_ERROR_UNWANTED_URI);
+
+  // track URIs.
+  TestResponseCode("test-track-simple", NS_ERROR_TRACKING_URI);
+  TestResponseCode("mozstd-track-digest256", NS_ERROR_TRACKING_URI);
+  TestResponseCode("test-block-simple,mozstd-track-digest256",
+                   NS_ERROR_TRACKING_URI);
+
+  // block URIs
+  TestResponseCode("test-block-simple", NS_ERROR_BLOCKED_URI);
+  TestResponseCode("mozplugin-block-digest256", NS_ERROR_BLOCKED_URI);
+  TestResponseCode("mozplugin2-block-digest256", NS_ERROR_BLOCKED_URI);
+
+  // Tables that never map to an error code return NS_OK.
+  TestResponseCode("test-trackwhite-simple", NS_OK);
+  TestResponseCode("mozstd-trackwhite-digest256", NS_OK);
+  TestResponseCode("goog-badbinurl-shavar", NS_OK);
+  TestResponseCode("goog-downloadwhite-digest256", NS_OK);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestURLsAndHashing.cpp b/toolkit/components/url-classifier/tests/gtest/TestURLsAndHashing.cpp
new file mode 100644
index 0000000000..0563c6a776
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestURLsAndHashing.cpp
@@ -0,0 +1,71 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "LookupCache.h"
+
+#include "Common.h"
+
+// Check that GetLookupFragments produces exactly the fragments listed in
+// |aExpected| for |aURL|.
+// NOTE(review): equality is checked as "same length + every generated
+// fragment appears in aExpected", which is duplicate-insensitive; adequate
+// for the hand-written expectation lists below.
+static void VerifyFragments(const nsACString& aURL,
+                            const nsTArray<nsCString>& aExpected) {
+  nsTArray<nsCString> fragments;
+  nsresult rv = LookupCache::GetLookupFragments(aURL, &fragments);
+  ASSERT_EQ(rv, NS_OK) << "GetLookupFragments should not fail";
+
+  ASSERT_EQ(aExpected.Length(), fragments.Length())
+      << "Fragments generated from " << aURL.BeginReading()
+      << " are not the same as expected";
+
+  for (const auto& fragment : fragments) {
+    ASSERT_TRUE(aExpected.Contains(fragment))
+    << "Fragments generated from " << aURL.BeginReading()
+    << " are not the same as expected";
+  }
+}
+
+// This testcase references SafeBrowsing spec:
+// https://developers.google.com/safe-browsing/v4/urls-hashing#suffixprefix-expressions
+// URL with both a path and a query string: fragments are generated with and
+// without the query, plus the host-only and intermediate-path forms.
+TEST(URLsAndHashing, FragmentURLWithQuery)
+{
+  const nsLiteralCString url("a.b.c/1/2.html?param=1");
+  nsTArray<nsCString> expect = {
+      "a.b.c/1/2.html?param=1"_ns,
+      "a.b.c/1/2.html"_ns,
+      "a.b.c/"_ns,
+      "a.b.c/1/"_ns,
+      "b.c/1/2.html?param=1"_ns,
+      "b.c/1/2.html"_ns,
+      "b.c/"_ns,
+      "b.c/1/"_ns,
+  };
+
+  VerifyFragments(url, expect);
+}
+
+// This testcase references SafeBrowsing spec:
+// https://developers.google.com/safe-browsing/v4/urls-hashing#suffixprefix-expressions
+// Deep hostname: only the last five host components (plus the full host)
+// produce fragments.
+TEST(URLsAndHashing, FragmentURLWithoutQuery)
+{
+  const nsLiteralCString url("a.b.c.d.e.f.g/1.html");
+  nsTArray<nsCString> expect = {
+      "a.b.c.d.e.f.g/1.html"_ns, "a.b.c.d.e.f.g/"_ns,
+      "c.d.e.f.g/1.html"_ns,     "c.d.e.f.g/"_ns,
+      "d.e.f.g/1.html"_ns,       "d.e.f.g/"_ns,
+      "e.f.g/1.html"_ns,         "e.f.g/"_ns,
+      "f.g/1.html"_ns,           "f.g/"_ns,
+  };
+
+  VerifyFragments(url, expect);
+}
+
+// IP-literal host with a query but no path: only the query and no-query
+// root forms are generated (IP hosts are not decomposed like hostnames).
+TEST(URLsAndHashing, FragmentURLEndWithoutPath)
+{
+  const nsLiteralCString url("1.2.3.4/?query=string");
+  nsTArray<nsCString> expect = {
+      "1.2.3.4/?query=string"_ns,
+      "1.2.3.4/"_ns,
+  };
+
+  VerifyFragments(url, expect);
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
new file mode 100644
index 0000000000..d4dec867bd
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierTableUpdateV4.cpp
@@ -0,0 +1,785 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "Classifier.h"
+#include "HashStore.h"
+#include "mozilla/Components.h"
+#include "mozilla/Unused.h"
+#include "nsAppDirectoryServiceDefs.h"
+#include "nsIFile.h"
+#include "nsThreadUtils.h"
+#include "string.h"
+#include "LookupCacheV4.h"
+#include "nsUrlClassifierUtils.h"
+
+#include "Common.h"
+
+#define GTEST_SAFEBROWSING_DIR "safebrowsing"_ns
+#define GTEST_TABLE "gtest-malware-proto"_ns
+#define GTEST_PREFIXFILE "gtest-malware-proto.vlpset"_ns
+
+// This function removes common elements of inArray and outArray from
+// outArray. This is used by partial update testcase to ensure partial update
+// data won't contain prefixes we already have.
+// NOTE(review): BinaryIndexOf requires outArray to be sorted; all callers
+// here build it via CreateRandomSortedPrefixArray, which sorts — confirm
+// before reusing elsewhere.
+static void RemoveIntersection(const _PrefixArray& inArray,
+                               _PrefixArray& outArray) {
+  for (uint32_t i = 0; i < inArray.Length(); i++) {
+    int32_t idx = outArray.BinaryIndexOf(inArray[i]);
+    if (idx >= 0) {
+      outArray.RemoveElementAt(idx);
+    }
+  }
+}
+
+// This function removes elements from outArray by index specified in
+// removal array. It iterates from the highest index downwards so earlier
+// removals do not shift the positions still to be removed (removal is kept
+// sorted by CreateRandomRemovalIndices).
+static void RemoveElements(const nsTArray<uint32_t>& removal,
+                           _PrefixArray& outArray) {
+  for (int32_t i = removal.Length() - 1; i >= 0; i--) {
+    outArray.RemoveElementAt(removal[i]);
+  }
+}
+
+// Concatenate array1 and array2 into |output| and sort the result.
+static void MergeAndSortArray(const _PrefixArray& array1,
+                              const _PrefixArray& array2,
+                              _PrefixArray& output) {
+  output.Clear();
+  output.AppendElements(array1);
+  output.AppendElements(array2);
+  output.Sort();
+}
+
+// Compute the SHA-256 digest over the prefix array, hashing prefixes in
+// sorted order (the same order the classifier uses when validating an
+// update). Note: sorts |prefixArray| in place.
+static void CalculateSHA256(_PrefixArray& prefixArray, nsCString& sha256) {
+  prefixArray.Sort();
+
+  nsresult rv;
+  nsCOMPtr<nsICryptoHash> cryptoHash =
+      do_CreateInstance(NS_CRYPTO_HASH_CONTRACTID, &rv);
+  // Previously |rv| was ignored; a failed instantiation would null-deref
+  // below instead of producing a useful test failure.
+  ASSERT_TRUE(NS_SUCCEEDED(rv));
+
+  cryptoHash->Init(nsICryptoHash::SHA256);
+  for (uint32_t i = 0; i < prefixArray.Length(); i++) {
+    const _Prefix& prefix = prefixArray[i];
+    cryptoHash->Update(
+        reinterpret_cast<uint8_t*>(const_cast<char*>(prefix.get())),
+        prefix.Length());
+  }
+  cryptoHash->Finish(false, sha256);
+}
+
+// N: Number of prefixes, MIN/MAX: minimum/maximum prefix size
+// This function will append generated prefixes to outArray.
+// Generated prefixes are unique within outArray, and the array is sorted
+// before returning.
+static void CreateRandomSortedPrefixArray(uint32_t N, uint32_t MIN,
+                                          uint32_t MAX,
+                                          _PrefixArray& outArray) {
+  outArray.SetCapacity(outArray.Length() + N);
+
+  const uint32_t range = (MAX - MIN + 1);
+
+  for (uint32_t i = 0; i < N; i++) {
+    uint32_t prefixSize = (rand() % range) + MIN;
+    _Prefix prefix;
+    prefix.SetLength(prefixSize);
+
+    // Re-roll random bytes until the candidate is not a duplicate.
+    while (true) {
+      char* dst = prefix.BeginWriting();
+      for (uint32_t j = 0; j < prefixSize; j++) {
+        dst[j] = rand() % 256;
+      }
+
+      if (!outArray.Contains(prefix)) {
+        outArray.AppendElement(prefix);
+        break;
+      }
+    }
+  }
+
+  outArray.Sort();
+}
+
+// N: Number of removal indices, MAX: maximum index
+// Indices are kept sorted and duplicates are dropped, so the output may
+// contain fewer than N entries.
+static void CreateRandomRemovalIndices(uint32_t N, uint32_t MAX,
+                                       nsTArray<uint32_t>& outArray) {
+  for (uint32_t i = 0; i < N; i++) {
+    uint32_t idx = rand() % MAX;
+    if (!outArray.Contains(idx)) {
+      outArray.InsertElementSorted(idx);
+    }
+  }
+}
+
+// Function to generate TableUpdateV4.
+// Builds one TableUpdateV4 for GTEST_TABLE from the given additions,
+// optional removal indices and optional SHA-256 checksum, and appends it
+// to |tableUpdates|.
+static void GenerateUpdateData(bool fullUpdate, PrefixStringMap& add,
+                               nsTArray<uint32_t>* removal, nsCString* sha256,
+                               TableUpdateArray& tableUpdates) {
+  RefPtr<TableUpdateV4> tableUpdate = new TableUpdateV4(GTEST_TABLE);
+  tableUpdate->SetFullUpdate(fullUpdate);
+
+  for (const auto& entry : add) {
+    nsCString* pstring = entry.GetWeak();
+    tableUpdate->NewPrefixes(entry.GetKey(), *pstring);
+  }
+
+  if (removal) {
+    tableUpdate->NewRemovalIndices(removal->Elements(), removal->Length());
+  }
+
+  if (sha256) {
+    // TableUpdateV4 stores the checksum as a std::string; copy it over.
+    std::string stdSHA256;
+    stdSHA256.assign(const_cast<char*>(sha256->BeginReading()),
+                     sha256->Length());
+
+    tableUpdate->SetSHA256(stdSHA256);
+  }
+
+  tableUpdates.AppendElement(tableUpdate);
+}
+
+// Load the prefix set that was written to disk and check it contains the
+// prefix strings in |expected| (keyed by prefix size).
+static void VerifyPrefixSet(PrefixStringMap& expected) {
+  // Verify the prefix set is written to disk.
+  nsCOMPtr<nsIFile> file;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+  file->AppendNative(GTEST_SAFEBROWSING_DIR);
+
+  RefPtr<LookupCacheV4> lookup =
+      new LookupCacheV4(GTEST_TABLE, "test"_ns, file);
+  lookup->Init();
+
+  file->AppendNative(GTEST_PREFIXFILE);
+  lookup->LoadFromFile(file);
+
+  PrefixStringMap prefixesInFile;
+  lookup->GetPrefixes(prefixesInFile);
+
+  for (const auto& entry : expected) {
+    nsCString* expectedPrefix = entry.GetWeak();
+    nsCString* resultPrefix = prefixesInFile.Get(entry.GetKey());
+
+    // Guard against a missing size bucket: Get() returns null when the key
+    // is absent, and dereferencing it would crash the whole gtest run
+    // instead of failing this assertion.
+    ASSERT_TRUE(resultPrefix);
+    ASSERT_TRUE(*resultPrefix == *expectedPrefix);
+  }
+}
+
+// Reset the classifier database under the test profile directory so each
+// testcase starts from a clean state.
+static void Clear() {
+  nsCOMPtr<nsIFile> file;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+
+  RefPtr<Classifier> classifier = new Classifier();
+  classifier->Open(*file);
+  classifier->Reset();
+}
+
+// Apply |tableUpdates| and expect the update to be rejected.
+static void testUpdateFail(TableUpdateArray& tableUpdates) {
+  nsresult rv = SyncApplyUpdates(tableUpdates);
+  ASSERT_NS_FAILED(rv);
+}
+
+// Apply |tableUpdates|, expect success, then verify the on-disk prefix set
+// matches |expected|.
+static void testUpdate(TableUpdateArray& tableUpdates,
+                       PrefixStringMap& expected) {
+  // Force nsUrlClassifierUtils loading on main thread
+  // because nsIUrlClassifierDBService will not run in advance
+  // in gtest.
+  nsUrlClassifierUtils::GetInstance();
+
+  nsresult rv = SyncApplyUpdates(tableUpdates);
+  ASSERT_TRUE(rv == NS_OK);
+  VerifyPrefixSet(expected);
+}
+
+// Apply a single full update; afterwards the expected prefix set is
+// exactly |add|.
+static void testFullUpdate(PrefixStringMap& add, nsCString* sha256) {
+  TableUpdateArray tableUpdates;
+
+  GenerateUpdateData(true, add, nullptr, sha256, tableUpdates);
+
+  testUpdate(tableUpdates, add);
+}
+
+// Apply a single partial update and verify the resulting prefix set equals
+// |expected| (the caller-computed merge of old and new data).
+static void testPartialUpdate(PrefixStringMap& add, nsTArray<uint32_t>* removal,
+                              nsCString* sha256, PrefixStringMap& expected) {
+  TableUpdateArray tableUpdates;
+  GenerateUpdateData(false, add, removal, sha256, tableUpdates);
+
+  testUpdate(tableUpdates, expected);
+}
+
+// Re-open the lookup cache from disk on a fresh thread; per the callers'
+// comments, Open() reloads the stored prefix set and verifies its sha256.
+static void testOpenLookupCache() {
+  nsCOMPtr<nsIFile> file;
+  NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+  file->AppendNative(GTEST_SAFEBROWSING_DIR);
+
+  RunTestInNewThread([&]() -> void {
+    RefPtr<LookupCacheV4> cache =
+        new LookupCacheV4(nsCString(GTEST_TABLE), ""_ns, file);
+    nsresult rv = cache->Init();
+    ASSERT_EQ(rv, NS_OK);
+
+    rv = cache->Open();
+    ASSERT_EQ(rv, NS_OK);
+  });
+}
+
+// Tests start from here.
+// Full update containing only fixed-length (4-byte) prefixes.
+TEST(UrlClassifierTableUpdateV4, FixLengthPSetFullUpdate)
+{
+  // Seed the process-wide PRNG once; the random-prefix helpers in the
+  // remaining tests reuse this seed.
+  srand(time(NULL));
+
+  _PrefixArray array;
+  PrefixStringMap map;
+  nsCString sha256;
+
+  CreateRandomSortedPrefixArray(5000, 4, 4, array);
+  PrefixArrayToPrefixStringMap(array, map);
+  CalculateSHA256(array, sha256);
+
+  testFullUpdate(map, &sha256);
+
+  Clear();
+}
+
+// Full update containing only variable-length (5..32 byte) prefixes.
+TEST(UrlClassifierTableUpdateV4, VariableLengthPSetFullUpdate)
+{
+  _PrefixArray array;
+  PrefixStringMap map;
+  nsCString sha256;
+
+  CreateRandomSortedPrefixArray(5000, 5, 32, array);
+  PrefixArrayToPrefixStringMap(array, map);
+  CalculateSHA256(array, sha256);
+
+  testFullUpdate(map, &sha256);
+
+  Clear();
+}
+
+// This test contains both variable-length and fixed-length prefix sets.
+TEST(UrlClassifierTableUpdateV4, MixedPSetFullUpdate)
+{
+  _PrefixArray array;
+  PrefixStringMap map;
+  nsCString sha256;
+
+  CreateRandomSortedPrefixArray(5000, 4, 4, array);
+  CreateRandomSortedPrefixArray(1000, 5, 32, array);
+  PrefixArrayToPrefixStringMap(array, map);
+  CalculateSHA256(array, sha256);
+
+  testFullUpdate(map, &sha256);
+
+  Clear();
+}
+
+// Full update followed by a partial update that both adds new prefixes and
+// removes 1/5 of the original ones.
+TEST(UrlClassifierTableUpdateV4, PartialUpdateWithRemoval)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first.
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
+    CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update with removal.
+  {
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
+    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+    RemoveIntersection(fArray, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // Remove 1/5 of elements of original prefix set.
+    nsTArray<uint32_t> removal;
+    CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+    RemoveElements(removal, fArray);
+
+    // Calculate the expected prefix map.
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateSHA256(mergedArray, sha256);
+
+    testPartialUpdate(pMap, &removal, &sha256, mergedMap);
+  }
+
+  Clear();
+}
+
+// Same as above, but the partial update carries no removal indices.
+TEST(UrlClassifierTableUpdateV4, PartialUpdateWithoutRemoval)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first.
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
+    CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update without removal
+  {
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
+    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+    RemoveIntersection(fArray, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // Calculate the expected prefix map.
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateSHA256(mergedArray, sha256);
+
+    testPartialUpdate(pMap, nullptr, &sha256, mergedMap);
+  }
+
+  Clear();
+}
+
+// Expect failure because partial update contains prefix already
+// in old prefix set.
+TEST(UrlClassifierTableUpdateV4, PartialUpdatePrefixAlreadyExist)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first.
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update which contains a prefix in previous full update.
+  // This should cause an update error.
+  {
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
+    TableUpdateArray tableUpdates;
+
+    // Pick one prefix from full update prefix and add it to partial update.
+    // This should result a failure when call ApplyUpdates.
+    pArray.AppendElement(fArray[rand() % fArray.Length()]);
+    CreateRandomSortedPrefixArray(200, 4, 32, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    GenerateUpdateData(false, pMap, nullptr, nullptr, tableUpdates);
+    testUpdateFail(tableUpdates);
+  }
+
+  Clear();
+}
+
+// Test apply partial update directly without applying a full update first.
+TEST(UrlClassifierTableUpdateV4, OnlyPartialUpdate)
+{
+  _PrefixArray pArray;
+  PrefixStringMap pMap;
+  nsCString sha256;
+
+  CreateRandomSortedPrefixArray(5000, 4, 4, pArray);
+  CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+  PrefixArrayToPrefixStringMap(pArray, pMap);
+  CalculateSHA256(pArray, sha256);
+
+  testPartialUpdate(pMap, nullptr, &sha256, pMap);
+
+  Clear();
+}
+
+// Test partial update without any ADD prefixes, only removalIndices.
+TEST(UrlClassifierTableUpdateV4, PartialUpdateOnlyRemoval)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first.
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(5000, 4, 4, fArray);
+    CreateRandomSortedPrefixArray(1000, 5, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update without add prefix, only contain removal indices.
+  {
+    _PrefixArray pArray;
+    // pMap is deliberately left empty: this update carries no additions.
+    PrefixStringMap pMap, mergedMap;
+    nsCString sha256;
+
+    // Remove 1/5 of elements of original prefix set.
+    nsTArray<uint32_t> removal;
+    CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+    RemoveElements(removal, fArray);
+
+    PrefixArrayToPrefixStringMap(fArray, mergedMap);
+    CalculateSHA256(fArray, sha256);
+
+    testPartialUpdate(pMap, &removal, &sha256, mergedMap);
+  }
+
+  Clear();
+}
+
+// Test one tableupdate array contains full update and multiple partial
+// updates, all applied in a single call.
+TEST(UrlClassifierTableUpdateV4, MultipleTableUpdates)
+{
+  _PrefixArray fArray, pArray, mergedArray;
+  PrefixStringMap fMap, pMap, mergedMap;
+  nsCString sha256;
+
+  TableUpdateArray tableUpdates;
+
+  // Generate first full update
+  CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
+  CreateRandomSortedPrefixArray(2000, 5, 32, fArray);
+  PrefixArrayToPrefixStringMap(fArray, fMap);
+  CalculateSHA256(fArray, sha256);
+
+  GenerateUpdateData(true, fMap, nullptr, &sha256, tableUpdates);
+
+  // Generate second partial update
+  CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
+  CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+  RemoveIntersection(fArray, pArray);
+  PrefixArrayToPrefixStringMap(pArray, pMap);
+
+  MergeAndSortArray(fArray, pArray, mergedArray);
+  CalculateSHA256(mergedArray, sha256);
+
+  GenerateUpdateData(false, pMap, nullptr, &sha256, tableUpdates);
+
+  // Generate third partial update
+  fArray.AppendElements(pArray);
+  fArray.Sort();
+  pArray.Clear();
+  CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
+  CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+  RemoveIntersection(fArray, pArray);
+  PrefixArrayToPrefixStringMap(pArray, pMap);
+
+  // Remove 1/5 of elements of original prefix set.
+  nsTArray<uint32_t> removal;
+  CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+  RemoveElements(removal, fArray);
+
+  MergeAndSortArray(fArray, pArray, mergedArray);
+  PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+  CalculateSHA256(mergedArray, sha256);
+
+  GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
+
+  testUpdate(tableUpdates, mergedMap);
+
+  Clear();
+}
+
+// Test apply full update first, and then apply multiple partial updates
+// in one tableupdate array.
+TEST(UrlClassifierTableUpdateV4, MultiplePartialUpdateTableUpdates)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    // Generate first full update
+    CreateRandomSortedPrefixArray(10000, 4, 4, fArray);
+    CreateRandomSortedPrefixArray(3000, 5, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply multiple partial updates in one table update
+  {
+    _PrefixArray pArray, mergedArray;
+    PrefixStringMap pMap, mergedMap;
+    nsCString sha256;
+    nsTArray<uint32_t> removal;
+    TableUpdateArray tableUpdates;
+
+    // Generate first partial update
+    CreateRandomSortedPrefixArray(3000, 4, 4, pArray);
+    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+    RemoveIntersection(fArray, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // Remove 1/5 of elements of original prefix set.
+    CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+    RemoveElements(removal, fArray);
+
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    CalculateSHA256(mergedArray, sha256);
+
+    GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
+
+    // Fold the first partial update into the baseline before generating
+    // the second one.
+    fArray.AppendElements(pArray);
+    fArray.Sort();
+    pArray.Clear();
+    removal.Clear();
+
+    // Generate second partial update.
+    CreateRandomSortedPrefixArray(2000, 4, 4, pArray);
+    CreateRandomSortedPrefixArray(1000, 5, 32, pArray);
+    RemoveIntersection(fArray, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // Remove 1/5 of elements of original prefix set.
+    CreateRandomRemovalIndices(fArray.Length() / 5, fArray.Length(), removal);
+    RemoveElements(removal, fArray);
+
+    MergeAndSortArray(fArray, pArray, mergedArray);
+    PrefixArrayToPrefixStringMap(mergedArray, mergedMap);
+    CalculateSHA256(mergedArray, sha256);
+
+    GenerateUpdateData(false, pMap, &removal, &sha256, tableUpdates);
+
+    testUpdate(tableUpdates, mergedMap);
+  }
+
+  Clear();
+}
+
+// Test removal indices are larger than the original prefix set.
+TEST(UrlClassifierTableUpdateV4, RemovalIndexTooLarge)
+{
+  _PrefixArray fArray;
+
+  // Apply a full update first
+  {
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update with a removal indices array larger than
+  // the old prefix set (fArray). This should cause an error.
+  {
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
+    nsTArray<uint32_t> removal;
+    TableUpdateArray tableUpdates;
+
+    CreateRandomSortedPrefixArray(200, 4, 32, pArray);
+    RemoveIntersection(fArray, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // One more removal index than there are prefixes.
+    for (uint32_t i = 0; i < fArray.Length() + 1; i++) {
+      removal.AppendElement(i);
+    }
+
+    GenerateUpdateData(false, pMap, &removal, nullptr, tableUpdates);
+    testUpdateFail(tableUpdates);
+  }
+
+  Clear();
+}
+
+// A partial update whose checksum does not match the merged result must be
+// rejected.
+TEST(UrlClassifierTableUpdateV4, ChecksumMismatch)
+{
+  // Apply a full update first
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+  }
+
+  // Apply a partial update with incorrect sha256
+  {
+    _PrefixArray pArray;
+    PrefixStringMap pMap;
+    nsCString sha256;
+    TableUpdateArray tableUpdates;
+
+    CreateRandomSortedPrefixArray(200, 4, 32, pArray);
+    PrefixArrayToPrefixStringMap(pArray, pMap);
+
+    // sha256 should be calculated with both old prefix set and add prefix
+    // set, here we only calculate sha256 with add prefix set to check if
+    // applyUpdate will return failure.
+    CalculateSHA256(pArray, sha256);
+
+    GenerateUpdateData(false, pMap, nullptr, &sha256, tableUpdates);
+    testUpdateFail(tableUpdates);
+  }
+
+  Clear();
+}
+
+// Apply an update, then re-open the lookup cache from disk, both with and
+// without a sha256 checksum on the update.
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateThenLoad)
+{
+  // Apply update with sha256
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+    nsCString sha256;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+    CalculateSHA256(fArray, sha256);
+
+    testFullUpdate(fMap, &sha256);
+
+    // Open lookup cache will load prefix set and verify the sha256
+    testOpenLookupCache();
+  }
+
+  Clear();
+
+  // Apply update without sha256
+  {
+    _PrefixArray fArray;
+    PrefixStringMap fMap;
+
+    CreateRandomSortedPrefixArray(1000, 4, 32, fArray);
+    PrefixArrayToPrefixStringMap(fArray, fMap);
+
+    testFullUpdate(fMap, nullptr);
+
+    testOpenLookupCache();
+  }
+
+  Clear();
+}
+
+// This test is used to avoid an error from nsICryptoHash
+TEST(UrlClassifierTableUpdateV4, ApplyUpdateWithFixedChecksum)
+{
+  // Fixed (non-random) prefixes so the expected sha256 below is stable.
+  _PrefixArray fArray = {_Prefix("enus"),
+                         _Prefix("apollo"),
+                         _Prefix("mars"),
+                         _Prefix("Hecatonchires cyclopes"),
+                         _Prefix("vesta"),
+                         _Prefix("neptunus"),
+                         _Prefix("jupiter"),
+                         _Prefix("diana"),
+                         _Prefix("minerva"),
+                         _Prefix("ceres"),
+                         _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
+                         _Prefix("hecatonchires"),
+                         _Prefix("alcyoneus"),
+                         _Prefix("hades"),
+                         _Prefix("vulcanus"),
+                         _Prefix("juno"),
+                         _Prefix("mercury"),
+                         _Prefix("Stheno, Euryale and Medusa")};
+  fArray.Sort();
+
+  PrefixStringMap fMap;
+  PrefixArrayToPrefixStringMap(fArray, fMap);
+
+  nsCString sha256(
+      "\xae\x18\x94\xd7\xd0\x83\x5f\xc1"
+      "\x58\x59\x5c\x2c\x72\xb9\x6e\x5e"
+      "\xf4\xe8\x0a\x6b\xff\x5e\x6b\x81"
+      "\x65\x34\x06\x16\x06\x59\xa0\x67");
+
+  testFullUpdate(fMap, &sha256);
+
+  // Open lookup cache will load prefix set and verify the sha256
+  testOpenLookupCache();
+
+  Clear();
+}
+
+// This test ensures that an empty update works correctly. Empty update
+// should be skipped by CheckValidUpdate in Classifier::UpdateTableV4.
+TEST(UrlClassifierTableUpdateV4, EmptyUpdate)
+{
+  PrefixStringMap emptyAddition;
+  nsTArray<uint32_t> emptyRemoval;
+
+  _PrefixArray array;
+  PrefixStringMap map;
+  nsCString sha256;
+
+  CalculateSHA256(array, sha256);
+
+  // Test apply empty full/partial update before we already
+  // have data in DB.
+  testFullUpdate(emptyAddition, &sha256);
+  testPartialUpdate(emptyAddition, &emptyRemoval, &sha256, map);
+
+  // Apply a full update.
+  CreateRandomSortedPrefixArray(100, 4, 4, array);
+  CreateRandomSortedPrefixArray(10, 5, 32, array);
+  PrefixArrayToPrefixStringMap(array, map);
+  CalculateSHA256(array, sha256);
+
+  testFullUpdate(map, &sha256);
+
+  // Test apply empty full/partial update when we already
+  // have data in DB
+  testPartialUpdate(emptyAddition, &emptyRemoval, &sha256, map);
+  testFullUpdate(emptyAddition, &sha256);
+
+  Clear();
+}
+
+// This test ensures applying an empty update directly through the update
+// algorithm should be correct.
+TEST(UrlClassifierTableUpdateV4, EmptyUpdate2)
+{
+  // Setup LookupCache with initial data
+  _PrefixArray array;
+  CreateRandomSortedPrefixArray(100, 4, 4, array);
+  CreateRandomSortedPrefixArray(10, 5, 32, array);
+  RefPtr<LookupCacheV4> cache = SetupLookupCache<LookupCacheV4>(array);
+
+  // Setup TableUpdate object with only sha256 from previous update(initial
+  // data).
+  nsCString sha256;
+  CalculateSHA256(array, sha256);
+  std::string stdSHA256;
+  stdSHA256.assign(const_cast<char*>(sha256.BeginReading()), sha256.Length());
+
+  RefPtr<TableUpdateV4> tableUpdate = new TableUpdateV4(GTEST_TABLE);
+  tableUpdate->SetSHA256(stdSHA256);
+
+  // Apply update directly through LookupCache interface
+  PrefixStringMap input, output;
+  PrefixArrayToPrefixStringMap(array, input);
+  nsresult rv = cache->ApplyUpdate(tableUpdate.get(), input, output);
+
+  ASSERT_TRUE(rv == NS_OK);
+
+  Clear();
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
new file mode 100644
index 0000000000..354a47f07d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestUrlClassifierUtils.cpp
@@ -0,0 +1,254 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <ctype.h>
+#include <stdio.h>
+
+#include <mozilla/RefPtr.h>
+#include "nsEscape.h"
+#include "nsString.h"
+#include "nsUrlClassifierUtils.h"
+#include "stdlib.h"
+
+#include "Common.h"
+
+static char int_to_hex_digit(int32_t i) {
+ NS_ASSERTION((i >= 0) && (i <= 15), "int too big in int_to_hex_digit");
+ return static_cast<char>(((i < 10) ? (i + '0') : ((i - 10) + 'A')));
+}
+
+static void CheckEquals(nsCString& expected, nsCString& actual) {
+ ASSERT_TRUE((expected).Equals((actual)))
+ << "Expected:" << expected.get() << ", Actual:" << actual.get();
+}
+
+static void TestUnescapeHelper(const char* in, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+
+ NS_UnescapeURL(strIn.get(), strIn.Length(), esc_AlwaysCopy, out);
+ CheckEquals(strExp, out);
+}
+
+static void TestEncodeHelper(const char* in, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+ nsUrlClassifierUtils::GetInstance()->SpecialEncode(strIn, true, out);
+ CheckEquals(strExp, out);
+}
+
+static void TestCanonicalizeHelper(const char* in, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+ nsUrlClassifierUtils::GetInstance()->CanonicalizePath(strIn, out);
+ CheckEquals(strExp, out);
+}
+
+static void TestCanonicalNumHelper(const char* in, uint32_t bytes,
+ bool allowOctal, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+ nsUrlClassifierUtils::GetInstance()->CanonicalNum(strIn, bytes, allowOctal,
+ out);
+ CheckEquals(strExp, out);
+}
+
+void TestHostnameHelper(const char* in, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+ nsUrlClassifierUtils::GetInstance()->CanonicalizeHostname(strIn, out);
+ CheckEquals(strExp, out);
+}
+
+// Make sure NS_UnescapeURL from nsEscape.h does what the server does.
+TEST(UrlClassifierUtils, Unescape)
+{
+ // test empty string
+ TestUnescapeHelper("\0", "\0");
+
+ // Test decoding of all characters.
+ nsCString allCharsEncoded, allCharsEncodedLowercase, allCharsAsString;
+ for (int32_t i = 1; i < 256; ++i) {
+ allCharsEncoded.Append('%');
+ allCharsEncoded.Append(int_to_hex_digit(i / 16));
+ allCharsEncoded.Append((int_to_hex_digit(i % 16)));
+
+ allCharsEncodedLowercase.Append('%');
+ allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i / 16)));
+ allCharsEncodedLowercase.Append(tolower(int_to_hex_digit(i % 16)));
+
+ allCharsAsString.Append(static_cast<char>(i));
+ }
+
+ nsCString out;
+ NS_UnescapeURL(allCharsEncoded.get(), allCharsEncoded.Length(),
+ esc_AlwaysCopy, out);
+
+ CheckEquals(allCharsAsString, out);
+
+ out.Truncate();
+ NS_UnescapeURL(allCharsEncodedLowercase.get(),
+ allCharsEncodedLowercase.Length(), esc_AlwaysCopy, out);
+ CheckEquals(allCharsAsString, out);
+
+ // Test %-related edge cases
+ TestUnescapeHelper("%", "%");
+ TestUnescapeHelper("%xx", "%xx");
+ TestUnescapeHelper("%%", "%%");
+ TestUnescapeHelper("%%%", "%%%");
+ TestUnescapeHelper("%%%%", "%%%%");
+ TestUnescapeHelper("%1", "%1");
+ TestUnescapeHelper("%1z", "%1z");
+ TestUnescapeHelper("a%1z", "a%1z");
+ TestUnescapeHelper("abc%d%e%fg%hij%klmno%", "abc%d%e%fg%hij%klmno%");
+
+ // A few more tests
+ TestUnescapeHelper("%25", "%");
+ TestUnescapeHelper("%25%32%35", "%25");
+}
+
+TEST(UrlClassifierUtils, Enc)
+{
+ // Test empty string
+ TestEncodeHelper("", "");
+
+ // Test that all characters we shouldn't encode ([33-34],36,[38,126]) are not.
+ nsCString noenc;
+ for (int32_t i = 33; i < 127; i++) {
+ if (i != 35 && i != 37) { // skip '#' (35) and '%' (37)
+ noenc.Append(static_cast<char>(i));
+ }
+ }
+ nsCString out;
+ nsUrlClassifierUtils::GetInstance()->SpecialEncode(noenc, false, out);
+ CheckEquals(noenc, out);
+
+ // Test that all the chars we should encode ([1,32], 35, 37, [127,255]) are
+ nsCString yesAsString, yesExpectedString;
+ for (int32_t i = 1; i < 256; i++) {
+ if (i < 33 || i == 35 || i == 37 || i > 126) {
+ yesAsString.Append(static_cast<char>(i));
+ yesExpectedString.Append('%');
+ yesExpectedString.Append(int_to_hex_digit(i / 16));
+ yesExpectedString.Append(int_to_hex_digit(i % 16));
+ }
+ }
+
+ out.Truncate();
+ nsUrlClassifierUtils::GetInstance()->SpecialEncode(yesAsString, false, out);
+ CheckEquals(yesExpectedString, out);
+
+ TestEncodeHelper("blah//blah", "blah/blah");
+}
+
+TEST(UrlClassifierUtils, Canonicalize)
+{
+ // Test repeated %-decoding. Note: %25 --> %, %32 --> 2, %35 --> 5
+ TestCanonicalizeHelper("%25", "%25");
+ TestCanonicalizeHelper("%25%32%35", "%25");
+ TestCanonicalizeHelper("asdf%25%32%35asd", "asdf%25asd");
+ TestCanonicalizeHelper("%%%25%32%35asd%%", "%25%25%25asd%25%25");
+ TestCanonicalizeHelper("%25%32%35%25%32%35%25%32%35", "%25%25%25");
+ TestCanonicalizeHelper("%25", "%25");
+ TestCanonicalizeHelper(
+ "%257Ea%2521b%2540c%2523d%2524e%25f%255E00%252611%252A22%252833%252944_"
+ "55%252B",
+ "~a!b@c%23d$e%25f^00&11*22(33)44_55+");
+
+ TestCanonicalizeHelper("", "");
+ TestCanonicalizeHelper(
+ "%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/"
+ "%77%77%77%2E%65%62%61%79%2E%63%6F%6D/",
+ "168.188.99.26/.secure/www.ebay.com/");
+ TestCanonicalizeHelper(
+ "195.127.0.11/uploads/%20%20%20%20/.verify/"
+ ".eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/",
+ "195.127.0.11/uploads/%20%20%20%20/.verify/"
+ ".eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/");
+ // Added in bug 489455. %00 should no longer be changed to %01.
+ TestCanonicalizeHelper("%00", "%00");
+}
+
+void TestParseIPAddressHelper(const char* in, const char* expected) {
+ nsCString out, strIn(in), strExp(expected);
+ nsUrlClassifierUtils::GetInstance()->ParseIPAddress(strIn, out);
+ CheckEquals(strExp, out);
+}
+
+TEST(UrlClassifierUtils, ParseIPAddress)
+{
+ TestParseIPAddressHelper("123.123.0.0.1", "");
+ TestParseIPAddressHelper("255.0.0.1", "255.0.0.1");
+ TestParseIPAddressHelper("12.0x12.01234", "12.18.2.156");
+ TestParseIPAddressHelper("276.2.3", "20.2.0.3");
+ TestParseIPAddressHelper("012.034.01.055", "10.28.1.45");
+ TestParseIPAddressHelper("0x12.0x43.0x44.0x01", "18.67.68.1");
+ TestParseIPAddressHelper("167838211", "10.1.2.3");
+ TestParseIPAddressHelper("3279880203", "195.127.0.11");
+ TestParseIPAddressHelper("0x12434401", "18.67.68.1");
+ TestParseIPAddressHelper("413960661", "24.172.137.213");
+ TestParseIPAddressHelper("03053104725", "24.172.137.213");
+ TestParseIPAddressHelper("030.0254.0x89d5", "24.172.137.213");
+ TestParseIPAddressHelper("1.234.4.0377", "1.234.4.255");
+ TestParseIPAddressHelper("1.2.3.00x0", "");
+ TestParseIPAddressHelper("10.192.95.89 xy", "10.192.95.89");
+ TestParseIPAddressHelper("10.192.95.89 xyz", "");
+ TestParseIPAddressHelper("1.2.3.0x0", "1.2.3.0");
+ TestParseIPAddressHelper("1.2.3.4", "1.2.3.4");
+}
+
+TEST(UrlClassifierUtils, CanonicalNum)
+{
+ TestCanonicalNumHelper("", 1, true, "");
+ TestCanonicalNumHelper("10", 0, true, "");
+ TestCanonicalNumHelper("45", 1, true, "45");
+ TestCanonicalNumHelper("0x10", 1, true, "16");
+ TestCanonicalNumHelper("367", 2, true, "1.111");
+ TestCanonicalNumHelper("012345", 3, true, "0.20.229");
+ TestCanonicalNumHelper("0173", 1, true, "123");
+ TestCanonicalNumHelper("09", 1, false, "9");
+ TestCanonicalNumHelper("0x120x34", 2, true, "");
+ TestCanonicalNumHelper("0x12fc", 2, true, "18.252");
+ TestCanonicalNumHelper("3279880203", 4, true, "195.127.0.11");
+ TestCanonicalNumHelper("0x0000059", 1, true, "89");
+ TestCanonicalNumHelper("0x00000059", 1, true, "89");
+ TestCanonicalNumHelper("0x0000067", 1, true, "103");
+}
+
+TEST(UrlClassifierUtils, Hostname)
+{
+ TestHostnameHelper("abcd123;[]", "abcd123;[]");
+ TestHostnameHelper("abc.123", "abc.123");
+ TestHostnameHelper("abc..123", "abc.123");
+ TestHostnameHelper("trailing.", "trailing");
+ TestHostnameHelper("i love trailing dots....", "i%20love%20trailing%20dots");
+ TestHostnameHelper(".leading", "leading");
+ TestHostnameHelper("..leading", "leading");
+ TestHostnameHelper(".dots.", "dots");
+ TestHostnameHelper(".both.", "both");
+ TestHostnameHelper(".both..", "both");
+ TestHostnameHelper("..both.", "both");
+ TestHostnameHelper("..both..", "both");
+ TestHostnameHelper("..a.b.c.d..", "a.b.c.d");
+ TestHostnameHelper("..127.0.0.1..", "127.0.0.1");
+ TestHostnameHelper("AB CD 12354", "ab%20cd%2012354");
+ TestHostnameHelper("\1\2\3\4\112\177", "%01%02%03%04j%7F");
+ TestHostnameHelper("<>.AS/-+", "<>.as/-+");
+ // Added in bug 489455. %00 should no longer be changed to %01.
+ TestHostnameHelper("%00", "%00");
+}
+
+TEST(UrlClassifierUtils, LongHostname)
+{
+ static const int kTestSize = 1024 * 150;
+ char* str = static_cast<char*>(malloc(kTestSize + 1));
+ memset(str, 'x', kTestSize);
+ str[kTestSize] = '\0';
+
+ nsAutoCString out;
+ nsDependentCString in(str);
+ PRIntervalTime clockStart = PR_IntervalNow();
+ nsUrlClassifierUtils::GetInstance()->CanonicalizeHostname(in, out);
+ PRIntervalTime clockEnd = PR_IntervalNow();
+
+ CheckEquals(in, out);
+
+ printf("CanonicalizeHostname on long string (%dms)\n",
+ PR_IntervalToMilliseconds(clockEnd - clockStart));
+}
diff --git a/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
new file mode 100644
index 0000000000..6eb36a12d2
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/TestVariableLengthPrefixSet.cpp
@@ -0,0 +1,486 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "LookupCacheV4.h"
+#include "mozilla/Preferences.h"
+#include <mozilla/RefPtr.h>
+#include "nsAppDirectoryServiceDefs.h"
+#include "nsClassHashtable.h"
+#include "nsString.h"
+#include "VariableLengthPrefixSet.h"
+
+#include "Common.h"
+
+// Create fullhash by appending random characters.
+static nsCString CreateFullHash(const nsACString& in) {
+ nsCString out(in);
+ out.SetLength(32);
+ for (size_t i = in.Length(); i < 32; i++) {
+ out.SetCharAt(char(rand() % 256), i);
+ }
+
+ return out;
+}
+
+// This function generates N prefixes with sizes between MIN and MAX.
+// The output array is not cleared; random results are appended to it.
+static void RandomPrefixes(uint32_t N, uint32_t MIN, uint32_t MAX,
+ _PrefixArray& array) {
+ array.SetCapacity(array.Length() + N);
+
+ uint32_t range = (MAX - MIN + 1);
+
+ for (uint32_t i = 0; i < N; i++) {
+ uint32_t prefixSize = (rand() % range) + MIN;
+ _Prefix prefix;
+ prefix.SetLength(prefixSize);
+
+ bool added = false;
+ while (!added) {
+ char* dst = prefix.BeginWriting();
+ for (uint32_t j = 0; j < prefixSize; j++) {
+ dst[j] = rand() % 256;
+ }
+
+ if (!array.Contains(prefix)) {
+ array.AppendElement(prefix);
+ added = true;
+ }
+ }
+ }
+}
+
+// This test loops through all the prefixes and converts each prefix to
+// fullhash by appending random characters, each converted fullhash
+// should at least match its original length in the prefixSet.
+static void DoExpectedLookup(LookupCacheV4* cache, _PrefixArray& array) {
+ uint32_t matchLength = 0;
+ for (uint32_t i = 0; i < array.Length(); i++) {
+ const nsCString& prefix = array[i];
+ Completion complete;
+ complete.Assign(CreateFullHash(prefix));
+
+ // Find match for prefix-generated full hash
+ bool has, confirmed;
+ cache->Has(complete, &has, &matchLength, &confirmed);
+ MOZ_ASSERT(matchLength != 0);
+
+ if (matchLength != prefix.Length()) {
+ // The returned match size is not the same as the prefix size.
+ // This could be because the generated fullhash matches another
+ // prefix; check whether such a prefix exists.
+ bool found = false;
+
+ for (uint32_t j = 0; j < array.Length(); j++) {
+ if (array[j].Length() != matchLength) {
+ continue;
+ }
+
+ if (0 == memcmp(complete.buf, array[j].BeginReading(), matchLength)) {
+ found = true;
+ break;
+ }
+ }
+ ASSERT_TRUE(found);
+ }
+ }
+}
+
+static void DoRandomLookup(LookupCacheV4* cache, uint32_t N,
+ _PrefixArray& array) {
+ for (uint32_t i = 0; i < N; i++) {
+ // Random 32-bytes test fullhash
+ char buf[32];
+ for (uint32_t j = 0; j < 32; j++) {
+ buf[j] = (char)(rand() % 256);
+ }
+
+ // Get the expected result.
+ nsTArray<uint32_t> expected;
+ for (uint32_t j = 0; j < array.Length(); j++) {
+ const nsACString& str = array[j];
+ if (0 == memcmp(buf, str.BeginReading(), str.Length())) {
+ expected.AppendElement(str.Length());
+ }
+ }
+
+ Completion complete;
+ complete.Assign(nsDependentCSubstring(buf, 32));
+ bool has, confirmed;
+ uint32_t matchLength = 0;
+ cache->Has(complete, &has, &matchLength, &confirmed);
+
+ ASSERT_TRUE(expected.IsEmpty() ? !matchLength
+ : expected.Contains(matchLength));
+ }
+}
+
+static already_AddRefed<LookupCacheV4> SetupLookupCache(
+ const nsACString& aName) {
+ nsCOMPtr<nsIFile> rootDir;
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(rootDir));
+
+ nsAutoCString provider("test");
+ RefPtr<LookupCacheV4> lookup = new LookupCacheV4(aName, provider, rootDir);
+ lookup->Init();
+
+ return lookup.forget();
+}
+
+class UrlClassifierPrefixSetTest : public ::testing::TestWithParam<uint32_t> {
+ protected:
+ void SetUp() override {
+ // max_array_size to 0 means we are testing delta algorithm here.
+ static const char prefKey[] =
+ "browser.safebrowsing.prefixset.max_array_size";
+ mozilla::Preferences::SetUint(prefKey, GetParam());
+
+ mCache = SetupLookupCache("test"_ns);
+ }
+
+ void TearDown() override {
+ mCache = nullptr;
+ mArray.Clear();
+ mMap.Clear();
+ }
+
+ nsresult SetupPrefixes(_PrefixArray&& aArray) {
+ mArray = std::move(aArray);
+ PrefixArrayToPrefixStringMap(mArray, mMap);
+ return mCache->Build(mMap);
+ }
+
+ void SetupPrefixesAndVerify(_PrefixArray& aArray) {
+ mArray = aArray.Clone();
+ PrefixArrayToPrefixStringMap(mArray, mMap);
+
+ ASSERT_NS_SUCCEEDED(mCache->Build(mMap));
+ Verify();
+ }
+
+ void SetupPrefixesAndVerify(_PrefixArray&& aArray) {
+ nsresult rv = SetupPrefixes(std::move(aArray));
+ ASSERT_NS_SUCCEEDED(rv);
+ Verify();
+ }
+
+ void SetupRandomPrefixesAndVerify(uint32_t N, uint32_t MIN, uint32_t MAX) {
+ srand(time(nullptr));
+ RandomPrefixes(N, MIN, MAX, mArray);
+ PrefixArrayToPrefixStringMap(mArray, mMap);
+
+ ASSERT_NS_SUCCEEDED(mCache->Build(mMap));
+ Verify();
+ }
+
+ void Verify() {
+ DoExpectedLookup(mCache, mArray);
+ DoRandomLookup(mCache, 1000, mArray);
+ CheckContent(mCache, mArray);
+ }
+
+ RefPtr<LookupCacheV4> mCache;
+ _PrefixArray mArray;
+ PrefixStringMap mMap;
+};
+
+// Test setting prefix set with only 4-bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, FixedLengthSet) {
+ SetupPrefixesAndVerify({
+ _Prefix("alph"),
+ _Prefix("brav"),
+ _Prefix("char"),
+ _Prefix("delt"),
+ _Prefix("echo"),
+ _Prefix("foxt"),
+ });
+}
+
+TEST_P(UrlClassifierPrefixSetTest, FixedLengthRandomSet) {
+ SetupRandomPrefixesAndVerify(1500, 4, 4);
+}
+
+TEST_P(UrlClassifierPrefixSetTest, FixedLengthRandomLargeSet) {
+ SetupRandomPrefixesAndVerify(15000, 4, 4);
+}
+
+TEST_P(UrlClassifierPrefixSetTest, FixedLengthTinySet) {
+ SetupPrefixesAndVerify({
+ _Prefix("tiny"),
+ });
+}
+
+// Test setting prefix set with only 5~32 bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, VariableLengthSet) {
+ SetupPrefixesAndVerify(
+ {_Prefix("bravo"), _Prefix("charlie"), _Prefix("delta"),
+ _Prefix("EchoEchoEchoEchoEcho"), _Prefix("foxtrot"),
+ _Prefix("GolfGolfGolfGolfGolfGolfGolfGolf"), _Prefix("hotel"),
+ _Prefix("november"), _Prefix("oscar"), _Prefix("quebec"),
+ _Prefix("romeo"), _Prefix("sierrasierrasierrasierrasierra"),
+ _Prefix("Tango"), _Prefix("whiskey"), _Prefix("yankee"),
+ _Prefix("ZuluZuluZuluZulu")});
+}
+
+TEST_P(UrlClassifierPrefixSetTest, VariableLengthRandomSet) {
+ SetupRandomPrefixesAndVerify(1500, 5, 32);
+}
+
+// Test setting prefix set with both 4-bytes prefixes and 5~32 bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, MixedPrefixSet) {
+ SetupPrefixesAndVerify(
+ {_Prefix("enus"), _Prefix("apollo"), _Prefix("mars"),
+ _Prefix("Hecatonchires cyclopes"), _Prefix("vesta"), _Prefix("neptunus"),
+ _Prefix("jupiter"), _Prefix("diana"), _Prefix("minerva"),
+ _Prefix("ceres"), _Prefix("Aidos,Adephagia,Adikia,Aletheia"),
+ _Prefix("hecatonchires"), _Prefix("alcyoneus"), _Prefix("hades"),
+ _Prefix("vulcanus"), _Prefix("juno"), _Prefix("mercury"),
+ _Prefix("Stheno, Euryale and Medusa")});
+}
+
+TEST_P(UrlClassifierPrefixSetTest, MixedRandomPrefixSet) {
+ SetupRandomPrefixesAndVerify(1500, 4, 32);
+}
+
+// Test resetting prefix set
+TEST_P(UrlClassifierPrefixSetTest, ResetPrefix) {
+ // Base prefix set
+ _PrefixArray oldArray = {
+ _Prefix("Iceland"), _Prefix("Peru"), _Prefix("Mexico"),
+ _Prefix("Australia"), _Prefix("Japan"), _Prefix("Egypt"),
+ _Prefix("America"), _Prefix("Finland"), _Prefix("Germany"),
+ _Prefix("Italy"), _Prefix("France"), _Prefix("Taiwan"),
+ };
+ SetupPrefixesAndVerify(oldArray);
+
+ // New prefix set
+ _PrefixArray newArray = {
+ _Prefix("Pikachu"), _Prefix("Bulbasaur"), _Prefix("Charmander"),
+ _Prefix("Blastoise"), _Prefix("Pidgey"), _Prefix("Mewtwo"),
+ _Prefix("Jigglypuff"), _Prefix("Persian"), _Prefix("Tentacool"),
+ _Prefix("Onix"), _Prefix("Eevee"), _Prefix("Jynx"),
+ };
+ SetupPrefixesAndVerify(newArray);
+
+ // Should not match any of the first prefix set
+ uint32_t matchLength = 0;
+ for (uint32_t i = 0; i < oldArray.Length(); i++) {
+ Completion complete;
+ complete.Assign(CreateFullHash(oldArray[i]));
+
+ // Find match for prefix-generated full hash
+ bool has, confirmed;
+ mCache->Has(complete, &has, &matchLength, &confirmed);
+
+ ASSERT_TRUE(matchLength == 0);
+ }
+}
+
+// Test only set one 4-bytes prefix and one full-length prefix
+TEST_P(UrlClassifierPrefixSetTest, TinyPrefixSet) {
+ SetupPrefixesAndVerify({
+ _Prefix("AAAA"),
+ _Prefix("11112222333344445555666677778888"),
+ });
+}
+
+// Test empty prefix set and IsEmpty function
+TEST_P(UrlClassifierPrefixSetTest, EmptyFixedPrefixSet) {
+ ASSERT_TRUE(mCache->IsEmpty());
+
+ SetupPrefixesAndVerify({});
+
+ // Insert a 4-byte prefix; IsEmpty should then return false
+ SetupPrefixesAndVerify({_Prefix("test")});
+
+ ASSERT_TRUE(!mCache->IsEmpty());
+}
+
+TEST_P(UrlClassifierPrefixSetTest, EmptyVariableLengthPrefixSet) {
+ ASSERT_TRUE(mCache->IsEmpty());
+
+ SetupPrefixesAndVerify({});
+
+ // Insert a 5~32-byte prefix; IsEmpty should then return false
+ SetupPrefixesAndVerify({_Prefix("test variable length")});
+
+ ASSERT_TRUE(!mCache->IsEmpty());
+}
+
+// Test that prefix sizes must be between 4 and 32 bytes
+TEST_P(UrlClassifierPrefixSetTest, MinMaxPrefixSet) {
+ // Prefix sets with sizes between 4 and 32 bytes should succeed
+ SetupPrefixesAndVerify({_Prefix("1234"), _Prefix("ABCDEFGHIJKKMNOP"),
+ _Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh")});
+
+ // Prefix size less than 4-bytes should fail
+ nsresult rv = SetupPrefixes({_Prefix("123")});
+ ASSERT_NS_FAILED(rv);
+
+ // Prefix size greater than 32-bytes should fail
+ rv = SetupPrefixes({_Prefix("1aaa2bbb3ccc4ddd5eee6fff7ggg8hhh9")});
+ ASSERT_NS_FAILED(rv);
+}
+
+// Test save then load prefix set with only 4-bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, LoadSaveFixedLengthPrefixSet) {
+ nsCOMPtr<nsIFile> file;
+ _PrefixArray array;
+ PrefixStringMap map;
+
+ // Save
+ {
+ RefPtr<LookupCacheV4> save = SetupLookupCache("test-save"_ns);
+
+ RandomPrefixes(10000, 4, 4, array);
+
+ PrefixArrayToPrefixStringMap(array, map);
+ save->Build(map);
+
+ DoExpectedLookup(save, array);
+ DoRandomLookup(save, 1000, array);
+ CheckContent(save, array);
+
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ file->Append(u"test.vlpset"_ns);
+ save->StoreToFile(file);
+ }
+
+ // Load
+ {
+ RefPtr<LookupCacheV4> load = SetupLookupCache("test-load"_ns);
+ load->LoadFromFile(file);
+
+ DoExpectedLookup(load, array);
+ DoRandomLookup(load, 1000, array);
+ CheckContent(load, array);
+ }
+
+ file->Remove(false);
+}
+
+// Test save then load prefix set with only 5~32 bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, LoadSaveVariableLengthPrefixSet) {
+ nsCOMPtr<nsIFile> file;
+ _PrefixArray array;
+ PrefixStringMap map;
+
+ // Save
+ {
+ RefPtr<LookupCacheV4> save = SetupLookupCache("test-save"_ns);
+
+ RandomPrefixes(10000, 5, 32, array);
+
+ PrefixArrayToPrefixStringMap(array, map);
+ save->Build(map);
+
+ DoExpectedLookup(save, array);
+ DoRandomLookup(save, 1000, array);
+ CheckContent(save, array);
+
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ file->Append(u"test.vlpset"_ns);
+ save->StoreToFile(file);
+ }
+
+ // Load
+ {
+ RefPtr<LookupCacheV4> load = SetupLookupCache("test-load"_ns);
+ load->LoadFromFile(file);
+
+ DoExpectedLookup(load, array);
+ DoRandomLookup(load, 1000, array);
+ CheckContent(load, array);
+ }
+
+ file->Remove(false);
+}
+
+// Test save then load prefix with both 4 bytes prefixes and 5~32 bytes prefixes
+TEST_P(UrlClassifierPrefixSetTest, LoadSavePrefixSet) {
+ nsCOMPtr<nsIFile> file;
+ _PrefixArray array;
+ PrefixStringMap map;
+
+ // Save
+ {
+ RefPtr<LookupCacheV4> save = SetupLookupCache("test-save"_ns);
+
+ // Simulate the real-world case where most prefixes are 4 bytes
+ RandomPrefixes(20000, 4, 4, array);
+ RandomPrefixes(1000, 5, 32, array);
+
+ PrefixArrayToPrefixStringMap(array, map);
+ save->Build(map);
+
+ DoExpectedLookup(save, array);
+ DoRandomLookup(save, 1000, array);
+ CheckContent(save, array);
+
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ file->Append(u"test.vlpset"_ns);
+ save->StoreToFile(file);
+ }
+
+ // Load
+ {
+ RefPtr<LookupCacheV4> load = SetupLookupCache("test-load"_ns);
+ load->LoadFromFile(file);
+
+ DoExpectedLookup(load, array);
+ DoRandomLookup(load, 1000, array);
+ CheckContent(load, array);
+ }
+
+ file->Remove(false);
+}
+
+// This is for fixed-length prefixset
+TEST_P(UrlClassifierPrefixSetTest, LoadSaveNoDelta) {
+ nsCOMPtr<nsIFile> file;
+ _PrefixArray array;
+ PrefixStringMap map;
+
+ for (uint32_t i = 0; i < 100; i++) {
+ // construct a tree without deltas by making the distance
+ // between entries larger than 16 bits
+ uint32_t v = ((1 << 16) + 1) * i;
+ nsCString* ele = array.AppendElement();
+ ele->AppendASCII(reinterpret_cast<const char*>(&v), 4);
+ }
+
+ // Save
+ {
+ RefPtr<LookupCacheV4> save = SetupLookupCache("test-save"_ns);
+
+ PrefixArrayToPrefixStringMap(array, map);
+ save->Build(map);
+
+ DoExpectedLookup(save, array);
+ DoRandomLookup(save, 1000, array);
+ CheckContent(save, array);
+
+ NS_GetSpecialDirectory(NS_APP_USER_PROFILE_50_DIR, getter_AddRefs(file));
+ file->Append(u"test.vlpset"_ns);
+ save->StoreToFile(file);
+ }
+
+ // Load
+ {
+ RefPtr<LookupCacheV4> load = SetupLookupCache("test-load"_ns);
+ load->LoadFromFile(file);
+
+ DoExpectedLookup(load, array);
+ DoRandomLookup(load, 1000, array);
+ CheckContent(load, array);
+ }
+
+ file->Remove(false);
+}
+
+// To run the same test for different configurations of
+// "browser.safebrowsing.prefixset.max_array_size"
+INSTANTIATE_TEST_SUITE_P(UrlClassifierPrefixSetTest, UrlClassifierPrefixSetTest,
+ ::testing::Values(0, UINT32_MAX));
diff --git a/toolkit/components/url-classifier/tests/gtest/moz.build b/toolkit/components/url-classifier/tests/gtest/moz.build
new file mode 100644
index 0000000000..070012de77
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/gtest/moz.build
@@ -0,0 +1,40 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+LOCAL_INCLUDES += [
+ "../..",
+]
+
+DEFINES["GOOGLE_PROTOBUF_NO_RTTI"] = True
+DEFINES["GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER"] = True
+
+UNIFIED_SOURCES += [
+ "Common.cpp",
+ "TestCaching.cpp",
+ "TestChunkSet.cpp",
+ "TestClassifier.cpp",
+ "TestFailUpdate.cpp",
+ "TestFindFullHash.cpp",
+ "TestLookupCacheV4.cpp",
+ "TestPerProviderDirectory.cpp",
+ "TestPrefixSet.cpp",
+ "TestProtocolParser.cpp",
+ "TestRiceDeltaDecoder.cpp",
+ "TestSafebrowsingHash.cpp",
+ "TestSafeBrowsingProtobuf.cpp",
+ "TestTable.cpp",
+ "TestUrlClassifierTableUpdateV4.cpp",
+ "TestUrlClassifierUtils.cpp",
+ "TestURLsAndHashing.cpp",
+ "TestVariableLengthPrefixSet.cpp",
+]
+
+# Required to have the same MOZ_SAFEBROWSING_DUMP_FAILED_UPDATES
+# as non-testing code.
+if CONFIG["NIGHTLY_BUILD"] or CONFIG["MOZ_DEBUG"]:
+ DEFINES["MOZ_SAFEBROWSING_DUMP_FAILED_UPDATES"] = True
+
+FINAL_LIBRARY = "xul-gtest"
diff --git a/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html b/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html
new file mode 100644
index 0000000000..1096274260
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/allowlistAnnotatedFrame.html
@@ -0,0 +1,143 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+
+// Modified by evil.js
+var scriptItem;
+
+var scriptItem1 = "untouched";
+var imageItem1 = "untouched";
+var frameItem1 = "untouched";
+var scriptItem2 = "untouched";
+var imageItem2 = "untouched";
+var frameItem2 = "untouched";
+var xhrItem = "untouched";
+var fetchItem = "untouched";
+var mediaItem1 = "untouched";
+
+async function checkLoads() {
+ window.parent.is(scriptItem1, "spoiled", "Should not block tracking js 1");
+ window.parent.is(scriptItem2, "spoiled", "Should not block tracking js 2");
+ window.parent.is(imageItem1, "spoiled", "Should not block tracking img 1");
+ window.parent.is(imageItem2, "spoiled", "Should not block tracking img 2");
+ window.parent.is(frameItem1, "spoiled", "Should not block tracking iframe 1");
+ window.parent.is(frameItem2, "spoiled", "Should not block tracking iframe 2");
+ window.parent.is(mediaItem1, "loaded", "Should not block tracking video");
+ window.parent.is(xhrItem, "loaded", "Should not block tracking XHR");
+ window.parent.is(fetchItem, "loaded", "Should not block fetches from tracking domains");
+ window.parent.is(window.document.blockedNodeByClassifierCount, 0,
+ "No elements should be blocked");
+
+ // End (parent) test.
+ await window.parent.clearPermissions();
+ window.parent.SimpleTest.finish();
+}
+
+var onloadCalled = false;
+var xhrFinished = false;
+var fetchFinished = false;
+var videoLoaded = false;
+function loaded(type) {
+ if (type === "onload") {
+ onloadCalled = true;
+ } else if (type === "xhr") {
+ xhrFinished = true;
+ } else if (type === "fetch") {
+ fetchFinished = true;
+ } else if (type === "video") {
+ videoLoaded = true;
+ }
+
+ if (onloadCalled && xhrFinished && fetchFinished && videoLoaded) {
+ checkLoads();
+ }
+}
+</script>
+
+</head>
+
+<body onload="loaded('onload')">
+
+<!-- Try loading from a tracking script URI (1) -->
+<script id="badscript1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="scriptItem1 = 'spoiled';"></script>
+
+<!-- Try loading from a tracking image URI (1) -->
+<img id="badimage1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg" onload="imageItem1 = 'spoiled';"/>
+
+<!-- Try loading from a tracking frame URI (1) -->
+<iframe id="badframe1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html" onload="frameItem1 = 'spoiled';"></iframe>
+
+<!-- Try loading from a tracking video URI -->
+<video id="badmedia1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/vp9.webm"></video>
+
+<script>
+var v = document.getElementById("badmedia1");
+v.addEventListener("loadedmetadata", function() {
+ mediaItem1 = "loaded";
+ loaded("video");
+}, true);
+v.addEventListener("error", function() {
+ mediaItem1 = "error";
+ loaded("video");
+}, true);
+
+// Try loading from a tracking script URI (2) - The loader may follow a
+// different path depending on whether the resource is loaded from JS or HTML.
+var newScript = document.createElement("script");
+newScript.id = "badscript2";
+newScript.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js";
+newScript.addEventListener("load", function onload() { scriptItem2 = "spoiled"; });
+document.body.appendChild(newScript);
+
+// Try loading from a tracking image URI (2)
+var newImage = document.createElement("img");
+newImage.id = "badimage2";
+newImage.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg";
+newImage.addEventListener("load", function onload() { imageItem2 = "spoiled"; });
+document.body.appendChild(newImage);
+
+// Try loading from a tracking iframe URI (2)
+var newFrame = document.createElement("iframe");
+newFrame.id = "badframe2";
+newFrame.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html";
+newFrame.addEventListener("load", function onload() { frameItem2 = "spoiled"; });
+document.body.appendChild(newFrame);
+
+// Try doing an XHR against a tracking domain (bug 1216793)
+function reqListener() {
+ xhrItem = "loaded";
+ loaded("xhr");
+}
+function transferFailed() {
+ xhrItem = "failed";
+ loaded("xhr");
+}
+function transferCanceled() {
+ xhrItem = "canceled";
+ loaded("xhr");
+}
+var oReq = new XMLHttpRequest();
+oReq.addEventListener("load", reqListener);
+oReq.addEventListener("error", transferFailed);
+oReq.addEventListener("abort", transferCanceled);
+oReq.open("GET", "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js");
+oReq.send();
+
+// Fetch from a tracking domain
+fetch("http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js").then(function(response) {
+ if (response.ok) {
+ fetchItem = "loaded";
+ loaded("fetch");
+ } else {
+ fetchItem = "badresponse";
+ loaded("fetch");
+ }
+ }).catch(function(error) {
+ fetchItem = "error";
+ loaded("fetch");
+});
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/bad.css b/toolkit/components/url-classifier/tests/mochitest/bad.css
new file mode 100644
index 0000000000..f57b36a778
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/bad.css
@@ -0,0 +1 @@
+#styleBad { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^ b/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/basic.vtt b/toolkit/components/url-classifier/tests/mochitest/basic.vtt
new file mode 100644
index 0000000000..7781790d04
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/basic.vtt
@@ -0,0 +1,27 @@
+WEBVTT
+Region: id=testOne lines=2 width=30%
+Region: id=testTwo lines=4 width=20%
+
+1
+00:00.500 --> 00:00.700 region:testOne
+This
+
+2
+00:01.200 --> 00:02.400 region:testTwo
+Is
+
+2.5
+00:02.000 --> 00:03.500 region:testOne
+(Over here?!)
+
+3
+00:02.710 --> 00:02.910
+A
+
+4
+00:03.217 --> 00:03.989
+Test
+
+5
+00:03.217 --> 00:03.989
+And more!
diff --git a/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^ b/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^
new file mode 100644
index 0000000000..23de552c1a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^
@@ -0,0 +1 @@
+Access-Control-Allow-Origin: * \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html b/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html
new file mode 100644
index 0000000000..80124da3cc
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html
@@ -0,0 +1,11 @@
+<html>
+<head>
+<title></title>
+</head>
+<body>
+
+<!-- Try loading from a malware javascript URI -->
+<script id="badscript" data-touched="not sure" src="http://bug1281083.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/bug_1580416.html b/toolkit/components/url-classifier/tests/mochitest/bug_1580416.html
new file mode 100644
index 0000000000..ae305bbb47
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/bug_1580416.html
@@ -0,0 +1,13 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+<title></title>
+</head>
+<body>
+
+<script id="goodscript" data-touched="not sure" src="http://mochitest.apps.fbsbx.com/tests/toolkit/components/url-classifier/tests/mochitest/good.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/cache.sjs b/toolkit/components/url-classifier/tests/mochitest/cache.sjs
new file mode 100644
index 0000000000..84fb0e9089
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/cache.sjs
@@ -0,0 +1,90 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/
+ */
+
+const CC = Components.Constructor;
+const BinaryInputStream = CC(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream"
+);
+
+function handleRequest(request, response) {
+ var query = {};
+ request.queryString.split("&").forEach(function (val) {
+ var idx = val.indexOf("=");
+ query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
+ });
+
+ var responseBody;
+
+ // Store fullhash in the server side.
+ if ("list" in query && "fullhash" in query) {
+ // In the server side we will store:
+ // 1. All the full hashes for a given list
+ // 2. All the lists we have right now
+  // data is separated by '\n'
+ let list = query.list;
+ let hashes = getState(list);
+
+ let hash = atob(query.fullhash);
+ hashes += hash + "\n";
+ setState(list, hashes);
+
+ let lists = getState("lists");
+ if (!lists.includes(list)) {
+ lists += list + "\n";
+ setState("lists", lists);
+ }
+
+ return;
+  // gethash count returns how many gethash requests were received.
+  // This is used by the client to know if a gethash request was triggered by Gecko
+ } else if ("gethashcount" == request.queryString) {
+ let counter = getState("counter");
+ responseBody = counter == "" ? "0" : counter;
+ } else {
+ let body = new BinaryInputStream(request.bodyInputStream);
+ let avail;
+ let bytes = [];
+
+ while ((avail = body.available()) > 0) {
+ Array.prototype.push.apply(bytes, body.readByteArray(avail));
+ }
+
+ let counter = getState("counter");
+ counter = counter == "" ? "1" : (parseInt(counter) + 1).toString();
+ setState("counter", counter);
+
+ responseBody = parseV2Request(bytes);
+ }
+
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(responseBody);
+}
+
+function parseV2Request(bytes) {
+ var request = String.fromCharCode.apply(this, bytes);
+ var [HEADER, PREFIXES] = request.split("\n");
+ var [PREFIXSIZE, LENGTH] = HEADER.split(":").map(val => {
+ return parseInt(val);
+ });
+
+ var ret = "";
+ for (var start = 0; start < LENGTH; start += PREFIXSIZE) {
+ getState("lists")
+ .split("\n")
+ .forEach(function (list) {
+ var completions = getState(list).split("\n");
+
+ for (var completion of completions) {
+ if (completion.indexOf(PREFIXES.substr(start, PREFIXSIZE)) == 0) {
+ ret += list + ":1:32\n";
+ ret += completion;
+ }
+ }
+ });
+ }
+
+ return ret;
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/chrome.toml b/toolkit/components/url-classifier/tests/mochitest/chrome.toml
new file mode 100644
index 0000000000..cbd0e475e3
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/chrome.toml
@@ -0,0 +1,83 @@
+[DEFAULT]
+skip-if = ["os == 'android'"]
+support-files = [
+ "allowlistAnnotatedFrame.html",
+ "classifiedAnnotatedFrame.html",
+ "classifiedAnnotatedPBFrame.html",
+ "trackingRequest.html",
+ "bug_1281083.html",
+ "bug_1580416.html",
+ "report.sjs",
+ "gethash.sjs",
+ "classifierCommon.js",
+ "classifierHelper.js",
+ "head.js",
+ "threathit.sjs",
+ "redirect_tracker.sjs",
+ "!/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/good.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/evil.css",
+ "!/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^",
+ "!/toolkit/components/url-classifier/tests/mochitest/evil.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^",
+ "!/toolkit/components/url-classifier/tests/mochitest/evilWorker.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/import.css",
+ "!/toolkit/components/url-classifier/tests/mochitest/raptor.jpg",
+ "!/toolkit/components/url-classifier/tests/mochitest/track.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js^headers^",
+ "!/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js",
+ "!/toolkit/components/url-classifier/tests/mochitest/vp9.webm",
+ "!/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/workerFrame.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/ping.sjs",
+ "!/toolkit/components/url-classifier/tests/mochitest/basic.vtt",
+ "!/toolkit/components/url-classifier/tests/mochitest/basic.vtt^headers^",
+ "!/toolkit/components/url-classifier/tests/mochitest/dnt.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/dnt.sjs",
+ "!/toolkit/components/url-classifier/tests/mochitest/update.sjs",
+ "!/toolkit/components/url-classifier/tests/mochitest/bad.css",
+ "!/toolkit/components/url-classifier/tests/mochitest/bad.css^headers^",
+ "!/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html",
+ "!/toolkit/components/url-classifier/tests/mochitest/seek.webm",
+ "!/toolkit/components/url-classifier/tests/mochitest/cache.sjs",
+]
+
+["test_advisory_link.html"]
+
+["test_allowlisted_annotations.html"]
+tags = "trackingprotection"
+
+["test_classified_annotations.html"]
+tags = "trackingprotection"
+skip-if = ["os == 'linux' && asan"] # Bug 1202548
+
+["test_classifier_changetablepref.html"]
+skip-if = ["verify"]
+
+["test_classifier_changetablepref_bug1395411.html"]
+
+["test_donottrack.html"]
+
+["test_privatebrowsing_trackingprotection.html"]
+tags = "trackingprotection"
+
+["test_reporturl.html"]
+skip-if = ["verify"]
+
+["test_safebrowsing_bug1272239.html"]
+
+["test_threathit_report.html"]
+skip-if = ["verify"]
+
+["test_trackingprotection_bug1157081.html"]
+tags = "trackingprotection"
+
+["test_trackingprotection_bug1312515.html"]
+
+["test_trackingprotection_bug1580416.html"]
+tags = "trackingprotection"
+
+["test_trackingprotection_whitelist.html"]
+tags = "trackingprotection"
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html
new file mode 100644
index 0000000000..9b12f529cb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html
@@ -0,0 +1,154 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+"use strict";
+
+var scriptItem = "untouched";
+var scriptItem1 = "untouched";
+var scriptItem2 = "untouched";
+var imageItem1 = "untouched";
+var imageItem2 = "untouched";
+var frameItem1 = "untouched";
+var frameItem2 = "untouched";
+var xhrItem = "untouched";
+var fetchItem = "untouched";
+var mediaItem1 = "untouched";
+
+var badids = [
+ "badscript1",
+ "badscript2",
+ "badimage1",
+ "badimage2",
+ "badframe1",
+ "badframe2",
+ "badmedia1",
+ "badcss",
+];
+
+var onloadCalled = false;
+var xhrFinished = false;
+var fetchFinished = false;
+var videoLoaded = false;
+function loaded(type) {
+ if (type === "onload") {
+ onloadCalled = true;
+ } else if (type === "xhr") {
+ xhrFinished = true;
+ } else if (type === "fetch") {
+ fetchFinished = true;
+ } else if (type === "video") {
+ videoLoaded = true;
+ }
+ if (onloadCalled && xhrFinished && fetchFinished && videoLoaded) {
+ var msg = new window.CustomEvent("OnLoadComplete", {
+ detail: JSON.stringify({
+ scriptItem,
+ scriptItem1,
+ scriptItem2,
+ imageItem1,
+ imageItem2,
+ frameItem1,
+ frameItem2,
+ xhrItem,
+ fetchItem,
+ mediaItem1,
+ }),
+ });
+ window.dispatchEvent(msg);
+ }
+}
+</script>
+
+<!-- Try loading from a tracking CSS URI -->
+<link id="badcss" rel="stylesheet" type="text/css" href="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
+
+</head>
+
+<body onload="loaded('onload')">
+
+<!-- Try loading from a tracking script URI (1): evil.js onload will have updated the scriptItem variable -->
+<script id="badscript1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="scriptItem1 = scriptItem;"></script>
+
+<!-- Try loading from a tracking image URI (1) -->
+<img id="badimage1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?reload=true" onload="imageItem1 = 'spoiled';"/>
+
+<!-- Try loading from a tracking frame URI (1) -->
+<iframe id="badframe1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html" onload="frameItem1 = 'spoiled';"></iframe>
+
+<!-- Try loading from a tracking video URI -->
+<video id="badmedia1" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/vp9.webm?reload=true"></video>
+
+<script>
+var v = document.getElementById("badmedia1");
+v.addEventListener("loadedmetadata", function() {
+ mediaItem1 = "loaded";
+ loaded("video");
+}, true);
+v.addEventListener("error", function() {
+ mediaItem1 = "error";
+ loaded("video");
+}, true);
+
+// Try loading from a tracking script URI (2) - The loader may follow a different path depending on whether the resource is loaded from JS or HTML.
+var newScript = document.createElement("script");
+newScript.id = "badscript2";
+newScript.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js";
+newScript.addEventListener("load", function() { scriptItem2 = scriptItem; });
+document.body.appendChild(newScript);
+
+// Try loading from a tracking image URI (2)
+var newImage = document.createElement("img");
+newImage.id = "badimage2";
+newImage.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?reload=true";
+newImage.addEventListener("load", function() { imageItem2 = "spoiled"; });
+document.body.appendChild(newImage);
+
+// Try loading from a tracking iframe URI (2)
+var newFrame = document.createElement("iframe");
+newFrame.id = "badframe2";
+newFrame.src = "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/track.html";
+newFrame.addEventListener("load", function() { frameItem2 = "spoiled"; });
+document.body.appendChild(newFrame);
+
+// Try doing an XHR against a tracking domain (bug 1216793)
+function reqListener() {
+ xhrItem = "loaded";
+ loaded("xhr");
+}
+function transferFailed() {
+ xhrItem = "failed";
+ loaded("xhr");
+}
+function transferCanceled() {
+ xhrItem = "canceled";
+ loaded("xhr");
+}
+var oReq = new XMLHttpRequest();
+oReq.addEventListener("load", reqListener);
+oReq.addEventListener("error", transferFailed);
+oReq.addEventListener("abort", transferCanceled);
+oReq.open("GET", "http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js");
+oReq.send();
+
+// Fetch from a tracking domain
+fetch("http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js").then(function(response) {
+ if (response.ok) {
+ fetchItem = "loaded";
+ loaded("fetch");
+ } else {
+ fetchItem = "badresponse";
+ loaded("fetch");
+ }
+ }).catch(function(error) {
+ fetchItem = "error";
+ loaded("fetch");
+});
+</script>
+
+The following should not be hidden:
+<div id="styleCheck">STYLE TEST</div>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html
new file mode 100644
index 0000000000..ecd89e91d6
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html
@@ -0,0 +1,26 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+<title></title>
+
+<link id="badcss" rel="stylesheet" type="text/css" href="https://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
+
+</head>
+<body>
+
+<script id="badscript" data-touched="not sure" src="https://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+<script id="goodscript" data-touched="not sure" src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/good.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+<!-- The image cache can cache JS handlers, so make sure we use a different URL for raptor.jpg each time -->
+<img id="badimage" data-touched="not sure" src="https://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?pbmode=test" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"/>
+
+<img id="goodimage" data-touched="not sure" src="https://tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?pbmode=test2" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"/>
+
+The following should not be hidden:
+<div id="styleCheck">STYLE TEST</div>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js b/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js
new file mode 100644
index 0000000000..ca288986e3
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/classifierCommon.js
@@ -0,0 +1,108 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+/* eslint-env mozilla/chrome-script */
+
+var dbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].getService(
+ Ci.nsIUrlListManager
+);
+
+var timer;
+function setTimeout(callback, delay) {
+ timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ timer.initWithCallback(
+ { notify: callback },
+ delay,
+ Ci.nsITimer.TYPE_ONE_SHOT
+ );
+}
+
+function doUpdate(update) {
+ let listener = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierUpdateObserver"]),
+ updateUrlRequested(url) {},
+ streamFinished(status) {},
+ updateError(errorCode) {
+ sendAsyncMessage("updateError", errorCode);
+ },
+ updateSuccess(requestedTimeout) {
+ sendAsyncMessage("updateSuccess");
+ },
+ };
+
+ try {
+ dbService.beginUpdate(
+ listener,
+ "test-malware-simple,test-unwanted-simple",
+ ""
+ );
+ dbService.beginStream("", "");
+ dbService.updateStream(update);
+ dbService.finishStream();
+ dbService.finishUpdate();
+ } catch (e) {
+ // beginUpdate may fail if there's an existing update in progress
+ // retry until success or testcase timeout.
+ setTimeout(() => {
+ doUpdate(update);
+ }, 1000);
+ }
+}
+
+function doReload() {
+ try {
+ dbService.reloadDatabase();
+ sendAsyncMessage("reloadSuccess");
+ } catch (e) {
+ setTimeout(() => {
+ doReload();
+ }, 1000);
+ }
+}
+
+// SafeBrowsing.jsm is initialized after mozEntries are added. Add observer
+// to receive "finished" event. For the case when this function is called
+// after the event had already been notified, we lookup entries to see if
+// they are already added to database.
+function waitForInit() {
+ if (listmanager.isRegistered()) {
+ sendAsyncMessage("safeBrowsingInited");
+ } else {
+ setTimeout(() => {
+ waitForInit();
+ }, 1000);
+ }
+}
+
+function doGetTables() {
+ const callback = tables => {
+ sendAsyncMessage("GetTableSuccess", tables);
+ };
+
+ try {
+ dbService.getTables(callback);
+ } catch (e) {
+ setTimeout(() => {
+ doGetTables();
+ }, 1000);
+ }
+}
+
+addMessageListener("doUpdate", ({ testUpdate }) => {
+ doUpdate(testUpdate);
+});
+
+addMessageListener("doReload", () => {
+ doReload();
+});
+
+addMessageListener("waitForInit", () => {
+ waitForInit();
+});
+
+addMessageListener("doGetTables", () => {
+ doGetTables();
+});
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html b/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html
new file mode 100644
index 0000000000..1e3617b9d8
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/classifierFrame.html
@@ -0,0 +1,57 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+
+var scriptItem = "untouched";
+
+function checkLoads() {
+ // Make sure the javascript did not load.
+ window.parent.is(scriptItem, "untouched", "Should not load bad javascript");
+
+ // Make sure the css did not load.
+ var elt = document.getElementById("styleCheck");
+ var style = document.defaultView.getComputedStyle(elt);
+ window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
+
+ elt = document.getElementById("styleBad");
+ style = document.defaultView.getComputedStyle(elt);
+ window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
+
+ elt = document.getElementById("styleImport");
+ style = document.defaultView.getComputedStyle(elt);
+ window.parent.isnot(style.visibility, "visible", "Should import clean css");
+
+ // Call parent.loadTestFrame again to test classification metadata in HTTP
+ // cache entries.
+ if (window.parent.firstLoad) {
+ window.parent.info("Reloading from cache...");
+ window.parent.firstLoad = false;
+ window.parent.loadTestFrame();
+ return;
+ }
+
+ // End (parent) test.
+ window.parent.SimpleTest.finish();
+}
+
+</script>
+
+<!-- Try loading from a malware javascript URI -->
+<script type="text/javascript" src="http://malware.mochi.test/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"></script>
+
+<!-- Try loading from an unwanted software css URI -->
+<link rel="stylesheet" type="text/css" href="http://unwanted.mochi.test/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
+
+<!-- Try loading a marked-as-malware css through an @import from a clean URI -->
+<link rel="stylesheet" type="text/css" href="import2.css"></link>
+</head>
+
+<body onload="checkLoads()">
+The following should not be hidden:
+<div id="styleCheck">STYLE TEST</div>
+<div id="styleBad">STYLE BAD</div>
+<div id="styleImport">STYLE IMPORT</div>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js b/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js
new file mode 100644
index 0000000000..157e5d54f7
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/classifierHelper.js
@@ -0,0 +1,204 @@
+if (typeof classifierHelper == "undefined") {
+ var classifierHelper = {};
+}
+
+const CLASSIFIER_COMMON_URL = SimpleTest.getTestFileURL("classifierCommon.js");
+var gScript = SpecialPowers.loadChromeScript(CLASSIFIER_COMMON_URL);
+var gOriginalGetHashURL;
+
+const PREFS = {
+ PROVIDER_LISTS: "browser.safebrowsing.provider.mozilla.lists",
+ DISALLOW_COMPLETIONS: "urlclassifier.disallow_completions",
+ PROVIDER_GETHASHURL: "browser.safebrowsing.provider.mozilla.gethashURL",
+};
+
+classifierHelper._curAddChunkNum = 1;
+
+// addUrlToDB is asynchronous, so queue the task to ensure
+// the callbacks follow the correct order.
+classifierHelper._updates = [];
+
+// Keep urls added to database, those urls should be automatically
+// removed after test complete.
+classifierHelper._updatesToCleanup = [];
+
+classifierHelper._initsCB = [];
+
+// This function returns a Promise that is resolved when SafeBrowsing.jsm
+// is initialized.
+classifierHelper.waitForInit = function () {
+ return new Promise(function (resolve, reject) {
+ classifierHelper._initsCB.push(resolve);
+ gScript.sendAsyncMessage("waitForInit");
+ });
+};
+
+// This function is used to allow completion for specific lists;
+// some lists like "test-malware-simple" are disabled by default from requesting completions.
+// "lists" are the dbs we would like to allow completion for
+// "url" is the completion server
+classifierHelper.allowCompletion = async function (lists, url) {
+ for (var list of lists) {
+ // Add test db to provider
+ var pref = await SpecialPowers.getParentCharPref(PREFS.PROVIDER_LISTS);
+ pref += "," + list;
+ await SpecialPowers.setCharPref(PREFS.PROVIDER_LISTS, pref);
+
+ // Rename test db so we will not disallow it from completions
+ pref = await SpecialPowers.getParentCharPref(PREFS.DISALLOW_COMPLETIONS);
+ pref = pref.replace(list, list + "-backup");
+ await SpecialPowers.setCharPref(PREFS.DISALLOW_COMPLETIONS, pref);
+ }
+
+ // Store the original get hash URL in order to reset it back during clean up.
+ gOriginalGetHashURL = SpecialPowers.getCharPref(PREFS.PROVIDER_GETHASHURL);
+
+ // Set get hash url
+ await SpecialPowers.setCharPref(PREFS.PROVIDER_GETHASHURL, url);
+};
+
+// Pass { url: ..., db: ... } to add url to database,
+// onsuccess/onerror will be called when update complete.
+classifierHelper.addUrlToDB = function (updateData) {
+ return new Promise(function (resolve, reject) {
+ var testUpdate = "";
+ for (var update of updateData) {
+ var LISTNAME = update.db;
+ var CHUNKDATA = update.url;
+ var CHUNKLEN = CHUNKDATA.length;
+ var HASHLEN = update.len ? update.len : 32;
+
+ update.addChunk = classifierHelper._curAddChunkNum;
+ classifierHelper._curAddChunkNum += 1;
+
+ classifierHelper._updatesToCleanup.push(update);
+ testUpdate +=
+ "n:1000\n" +
+ "i:" +
+ LISTNAME +
+ "\n" +
+ "ad:1\n" +
+ "a:" +
+ update.addChunk +
+ ":" +
+ HASHLEN +
+ ":" +
+ CHUNKLEN +
+ "\n" +
+ CHUNKDATA;
+ }
+
+ classifierHelper._update(testUpdate, resolve, reject);
+ });
+};
+
+// This API is used to expire all add/sub chunks we have updated
+// by using addUrlToDB.
+classifierHelper.resetDatabase = function () {
+ function removeDatabase() {
+ return new Promise(function (resolve, reject) {
+ var testUpdate = "";
+ for (var update of classifierHelper._updatesToCleanup) {
+ testUpdate +=
+ "n:1000\ni:" + update.db + "\nad:" + update.addChunk + "\n";
+ }
+
+ classifierHelper._update(testUpdate, resolve, reject);
+ });
+ }
+
+ // Remove and then reload will ensure both database and cache will
+ // be cleared.
+ return Promise.resolve()
+ .then(removeDatabase)
+ .then(classifierHelper.reloadDatabase);
+};
+
+classifierHelper.reloadDatabase = function () {
+ return new Promise(function (resolve, reject) {
+ gScript.addMessageListener("reloadSuccess", function handler() {
+ gScript.removeMessageListener("reloadSuccess", handler);
+ resolve();
+ });
+
+ gScript.sendAsyncMessage("doReload");
+ });
+};
+
+classifierHelper.getTables = function () {
+ return new Promise(resolve => {
+ gScript.addMessageListener("GetTableSuccess", function handler(tables) {
+ gScript.removeMessageListener("GetTableSuccess", handler);
+ resolve(tables);
+ });
+
+ gScript.sendAsyncMessage("doGetTables");
+ });
+};
+
+classifierHelper._update = function (testUpdate, onsuccess, onerror) {
+ // Queue the task if there is still an on-going update
+ classifierHelper._updates.push({
+ data: testUpdate,
+ onsuccess,
+ onerror,
+ });
+ if (classifierHelper._updates.length != 1) {
+ return;
+ }
+
+ gScript.sendAsyncMessage("doUpdate", { testUpdate });
+};
+
+classifierHelper._updateSuccess = function () {
+ var update = classifierHelper._updates.shift();
+ update.onsuccess();
+
+ if (classifierHelper._updates.length) {
+ var testUpdate = classifierHelper._updates[0].data;
+ gScript.sendAsyncMessage("doUpdate", { testUpdate });
+ }
+};
+
+classifierHelper._updateError = function (errorCode) {
+ var update = classifierHelper._updates.shift();
+ update.onerror(errorCode);
+
+ if (classifierHelper._updates.length) {
+ var testUpdate = classifierHelper._updates[0].data;
+ gScript.sendAsyncMessage("doUpdate", { testUpdate });
+ }
+};
+
+classifierHelper._inited = function () {
+ classifierHelper._initsCB.forEach(function (cb) {
+ cb();
+ });
+ classifierHelper._initsCB = [];
+};
+
+classifierHelper._setup = function () {
+ gScript.addMessageListener("updateSuccess", classifierHelper._updateSuccess);
+ gScript.addMessageListener("updateError", classifierHelper._updateError);
+ gScript.addMessageListener("safeBrowsingInited", classifierHelper._inited);
+
+ // cleanup will be called at end of each testcase to remove all the urls added to database.
+ SimpleTest.registerCleanupFunction(classifierHelper._cleanup);
+};
+
+classifierHelper._cleanup = function () {
+  // clean all the preferences that may have been touched by the helper
+ Object.values(PREFS).map(pref => SpecialPowers.clearUserPref(pref));
+
+ // Set the getHashURL back, the original value isn't the same as the default
+ // pref value.
+ SpecialPowers.setCharPref(PREFS.PROVIDER_GETHASHURL, gOriginalGetHashURL);
+
+ if (!classifierHelper._updatesToCleanup) {
+ return Promise.resolve();
+ }
+
+ return classifierHelper.resetDatabase();
+};
+
+classifierHelper._setup();
diff --git a/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js b/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js
new file mode 100644
index 0000000000..e7361119b6
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/cleanWorker.js
@@ -0,0 +1,12 @@
+/* eslint-env worker */
+
+onmessage = function () {
+ try {
+ importScripts("evilWorker.js");
+ } catch (ex) {
+ postMessage("success");
+ return;
+ }
+
+ postMessage("failure");
+};
diff --git a/toolkit/components/url-classifier/tests/mochitest/dnt.html b/toolkit/components/url-classifier/tests/mochitest/dnt.html
new file mode 100644
index 0000000000..2246263688
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/dnt.html
@@ -0,0 +1,31 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+
+function makeXHR(url, callback) {
+ var xhr = new XMLHttpRequest();
+ xhr.open("GET", url, true);
+ xhr.onload = function() {
+ callback(xhr.response);
+ };
+ xhr.send();
+}
+
+function loaded(type) {
+ window.parent.postMessage("navigator.doNotTrack=" + navigator.doNotTrack, "*");
+
+ makeXHR("dnt.sjs", (res) => {
+ window.parent.postMessage("DNT=" + res, "*");
+ window.parent.postMessage("finish", "*");
+ });
+}
+
+</script>
+</head>
+
+<body onload="loaded('onload')">
+</body>
+
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/dnt.sjs b/toolkit/components/url-classifier/tests/mochitest/dnt.sjs
new file mode 100644
index 0000000000..bbb836482a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/dnt.sjs
@@ -0,0 +1,9 @@
+function handleRequest(request, response) {
+ var dnt = "unspecified";
+ if (request.hasHeader("DNT")) {
+ dnt = "1";
+ }
+
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(dnt);
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.css b/toolkit/components/url-classifier/tests/mochitest/evil.css
new file mode 100644
index 0000000000..62d506c899
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/evil.css
@@ -0,0 +1 @@
+#styleCheck { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^ b/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^
new file mode 100644
index 0000000000..4030ea1d3d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/evil.css^headers^
@@ -0,0 +1 @@
+Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.js b/toolkit/components/url-classifier/tests/mochitest/evil.js
new file mode 100644
index 0000000000..3e8b165587
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/evil.js
@@ -0,0 +1,3 @@
+/* global scriptItem:true */
+
+scriptItem = "loaded malware javascript!";
diff --git a/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^ b/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^
new file mode 100644
index 0000000000..3eced96143
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/evil.js^headers^
@@ -0,0 +1,2 @@
+Access-Control-Allow-Origin: *
+Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/evilWorker.js b/toolkit/components/url-classifier/tests/mochitest/evilWorker.js
new file mode 100644
index 0000000000..b4e8a47602
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/evilWorker.js
@@ -0,0 +1,5 @@
+/* eslint-env worker */
+
+onmessage = function () {
+ postMessage("loaded bad file");
+};
diff --git a/toolkit/components/url-classifier/tests/mochitest/features.js b/toolkit/components/url-classifier/tests/mochitest/features.js
new file mode 100644
index 0000000000..0004693d81
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/features.js
@@ -0,0 +1,291 @@
+var tests = [
+ // Config is an array with 4 elements:
+ // - annotation blocklist
+ // - annotation entitylist
+ // - tracking blocklist
+ // - tracking entitylist
+
+ // All disabled.
+ {
+ config: [false, false, false, false],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // Just entitylisted.
+ {
+ config: [false, false, false, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // Just blocklisted.
+ {
+ config: [false, false, true, false],
+ loadExpected: false,
+ annotationExpected: false,
+ },
+
+ // entitylist + blocklist => entitylist wins
+ {
+ config: [false, false, true, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // just annotated in entitylist.
+ {
+ config: [false, true, false, false],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // TP and annotation entitylisted.
+ {
+ config: [false, true, false, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // Annotation entitylisted, but TP blocklisted.
+ {
+ config: [false, true, true, false],
+ loadExpected: false,
+ annotationExpected: false,
+ },
+
+ // Annotation entitylisted. TP blocklisted and entitylisted: entitylist wins.
+ {
+ config: [false, true, true, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // Just blocklist annotated.
+ {
+ config: [true, false, false, false],
+ loadExpected: true,
+ annotationExpected: true,
+ },
+
+ // annotated but TP entitylisted.
+ {
+ config: [true, false, false, true],
+ loadExpected: true,
+ annotationExpected: true,
+ },
+
+ // annotated and blocklisted.
+ {
+ config: [true, false, true, false],
+ loadExpected: false,
+ annotationExpected: false,
+ },
+
+ // annotated, TP blocklisted and entitylisted: entitylist wins.
+ {
+ config: [true, false, true, true],
+ loadExpected: true,
+ annotationExpected: true,
+ },
+
+  // annotated in entitylist and blocklist.
+ {
+ config: [true, true, false, false],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+  // annotated in entitylist and blocklist. TP entitylisted.
+ {
+ config: [true, true, false, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+
+ // everywhere. TP entitylist wins.
+ {
+ config: [true, true, true, true],
+ loadExpected: true,
+ annotationExpected: false,
+ },
+];
+
+function prefBlacklistValue(value) {
+ return value ? "example.com" : "";
+}
+
+function prefWhitelistValue(value) {
+ return value ? "mochi.test,mochi.xorigin-test" : "";
+}
+
+async function runTest(test, expectedFlag, expectedTrackingResource, prefs) {
+ let config = [
+ [
+ "urlclassifier.trackingAnnotationTable.testEntries",
+ prefBlacklistValue(test.config[0]),
+ ],
+ [
+ "urlclassifier.features.fingerprinting.annotate.blacklistHosts",
+ prefBlacklistValue(test.config[0]),
+ ],
+ [
+ "urlclassifier.features.cryptomining.annotate.blacklistHosts",
+ prefBlacklistValue(test.config[0]),
+ ],
+ [
+ "urlclassifier.features.socialtracking.annotate.blacklistHosts",
+ prefBlacklistValue(test.config[0]),
+ ],
+
+ [
+ "urlclassifier.trackingAnnotationWhitelistTable.testEntries",
+ prefWhitelistValue(test.config[1]),
+ ],
+ [
+ "urlclassifier.features.fingerprinting.annotate.whitelistHosts",
+ prefWhitelistValue(test.config[1]),
+ ],
+ [
+ "urlclassifier.features.cryptomining.annotate.whitelistHosts",
+ prefWhitelistValue(test.config[1]),
+ ],
+ [
+ "urlclassifier.features.socialtracking.annotate.whitelistHosts",
+ prefWhitelistValue(test.config[1]),
+ ],
+
+ [
+ "urlclassifier.trackingTable.testEntries",
+ prefBlacklistValue(test.config[2]),
+ ],
+ [
+ "urlclassifier.features.fingerprinting.blacklistHosts",
+ prefBlacklistValue(test.config[2]),
+ ],
+ [
+ "urlclassifier.features.cryptomining.blacklistHosts",
+ prefBlacklistValue(test.config[2]),
+ ],
+ [
+ "urlclassifier.features.socialtracking.blacklistHosts",
+ prefBlacklistValue(test.config[2]),
+ ],
+
+ [
+ "urlclassifier.trackingWhitelistTable.testEntries",
+ prefWhitelistValue(test.config[3]),
+ ],
+ [
+ "urlclassifier.features.fingerprinting.whitelistHosts",
+ prefWhitelistValue(test.config[3]),
+ ],
+ [
+ "urlclassifier.features.cryptomining.whitelistHosts",
+ prefWhitelistValue(test.config[3]),
+ ],
+ [
+ "urlclassifier.features.socialtracking.whitelistHosts",
+ prefWhitelistValue(test.config[3]),
+ ],
+ ];
+
+ info("Testing: " + JSON.stringify(config) + "\n");
+
+ await SpecialPowers.pushPrefEnv({ set: config.concat(prefs) });
+
+ // This promise will be resolved when the chromeScript knows if the channel
+ // is annotated or not.
+ let annotationPromise;
+ if (test.loadExpected) {
+ info("We want to have annotation information");
+ annotationPromise = new Promise(resolve => {
+ chromeScript.addMessageListener(
+ "last-channel-flags",
+ data => resolve(data),
+ { once: true }
+ );
+ });
+ }
+
+ // Let's load a script with a random query string to avoid network cache.
+  // We use a script because the fingerprinting feature does not block display content.
+ let result = await new Promise(resolve => {
+ let script = document.createElement("script");
+ script.setAttribute(
+ "src",
+ "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js?" +
+ Math.random()
+ );
+ script.onload = _ => resolve(true);
+ script.onerror = _ => resolve(false);
+ document.body.appendChild(script);
+ });
+
+ is(result, test.loadExpected, "The loading happened correctly");
+
+ if (annotationPromise) {
+ let data = await annotationPromise;
+ is(
+ !!data.classificationFlags,
+ test.annotationExpected,
+ "The annotation happened correctly"
+ );
+ if (test.annotationExpected) {
+ is(data.classificationFlags, expectedFlag, "Correct flag");
+ is(
+ data.isThirdPartyTrackingResource,
+ expectedTrackingResource,
+ "Tracking resource flag matches"
+ );
+ }
+ }
+}
+
+var chromeScript;
+
+function runTests(flag, prefs, trackingResource) {
+ chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ function onExamResp(subject, topic, data) {
+ let channel = subject.QueryInterface(Ci.nsIHttpChannel);
+ let classifiedChannel = subject.QueryInterface(Ci.nsIClassifiedChannel);
+ if (
+ !channel ||
+ !classifiedChannel ||
+ !channel.URI.spec.startsWith(
+ "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"
+ )
+ ) {
+ return;
+ }
+
+ sendAsyncMessage("last-channel-flags", {
+ classificationFlags: classifiedChannel.classificationFlags,
+ isThirdPartyTrackingResource:
+ classifiedChannel.isThirdPartyTrackingResource(),
+ });
+ }
+
+ addMessageListener("done", __ => {
+ Services.obs.removeObserver(onExamResp, "http-on-examine-response");
+ });
+
+ Services.obs.addObserver(onExamResp, "http-on-examine-response");
+
+ sendAsyncMessage("start-test");
+ });
+
+ chromeScript.addMessageListener(
+ "start-test",
+ async _ => {
+ for (let test in tests) {
+ await runTest(tests[test], flag, trackingResource, prefs);
+ }
+
+ chromeScript.sendAsyncMessage("done");
+ SimpleTest.finish();
+ },
+ { once: true }
+ );
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/gethash.sjs b/toolkit/components/url-classifier/tests/mochitest/gethash.sjs
new file mode 100644
index 0000000000..12e8c11835
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/gethash.sjs
@@ -0,0 +1,86 @@
+const CC = Components.Constructor;
+const BinaryInputStream = CC(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream"
+);
+
+function handleRequest(request, response) {
+ var query = {};
+ request.queryString.split("&").forEach(function (val) {
+ var idx = val.indexOf("=");
+ query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
+ });
+
+ var responseBody;
+
+ // Store fullhash in the server side.
+ if ("list" in query && "fullhash" in query) {
+ // In the server side we will store:
+ // 1. All the full hashes for a given list
+ // 2. All the lists we have right now
+    // data is separated by '\n'
+ let list = query.list;
+ let hashes = getState(list);
+
+ let hash = atob(query.fullhash);
+ hashes += hash + "\n";
+ setState(list, hashes);
+
+ let lists = getState("lists");
+ if (!lists.includes(list)) {
+ lists += list + "\n";
+ setState("lists", lists);
+ }
+
+ return;
+    // gethashcount returns how many gethash requests have been received.
+    // This is used by the client to know if a gethash request was triggered by gecko.
+ } else if ("gethashcount" == request.queryString) {
+ let counter = getState("counter");
+ responseBody = counter == "" ? "0" : counter;
+ } else {
+ let body = new BinaryInputStream(request.bodyInputStream);
+ let avail;
+ let bytes = [];
+
+ while ((avail = body.available()) > 0) {
+ Array.prototype.push.apply(bytes, body.readByteArray(avail));
+ }
+
+ let counter = getState("counter");
+ counter = counter == "" ? "1" : (parseInt(counter) + 1).toString();
+ setState("counter", counter);
+
+ responseBody = parseV2Request(bytes);
+ }
+
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(responseBody);
+}
+
+function parseV2Request(bytes) {
+ var request = String.fromCharCode.apply(this, bytes);
+ var [HEADER, PREFIXES] = request.split("\n");
+ var [PREFIXSIZE, LENGTH] = HEADER.split(":").map(val => {
+ return parseInt(val);
+ });
+
+ var ret = "";
+ for (var start = 0; start < LENGTH; start += PREFIXSIZE) {
+ getState("lists")
+ .split("\n")
+ .forEach(function (list) {
+ var completions = getState(list).split("\n");
+
+ for (var completion of completions) {
+ if (completion.indexOf(PREFIXES.substr(start, PREFIXSIZE)) == 0) {
+ ret += list + ":1:32\n";
+ ret += completion;
+ }
+ }
+ });
+ }
+
+ return ret;
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html b/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html
new file mode 100644
index 0000000000..4f518dabe0
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/gethashFrame.html
@@ -0,0 +1,61 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+
+var scriptItem = "untouched";
+
+function checkLoads() {
+ var title = document.getElementById("title");
+ title.textContent = window.parent.shouldLoad ?
+ "The following should be hidden:" :
+ "The following should not be hidden:";
+
+ if (window.parent.shouldLoad) {
+ window.parent.is(scriptItem, "loaded malware javascript!", "Should load bad javascript");
+ } else {
+ window.parent.is(scriptItem, "untouched", "Should not load bad javascript");
+ }
+
+ var elt = document.getElementById("styleImport");
+ var style = document.defaultView.getComputedStyle(elt);
+ window.parent.isnot(style.visibility, "visible", "Should load clean css");
+
+ // Make sure the css did not load.
+ elt = document.getElementById("styleCheck");
+ style = document.defaultView.getComputedStyle(elt);
+ if (window.parent.shouldLoad) {
+ window.parent.isnot(style.visibility, "visible", "Should load bad css");
+ } else {
+ window.parent.isnot(style.visibility, "hidden", "Should not load bad css");
+ }
+
+ elt = document.getElementById("styleBad");
+ style = document.defaultView.getComputedStyle(elt);
+ if (window.parent.shouldLoad) {
+ window.parent.isnot(style.visibility, "visible", "Should import bad css");
+ } else {
+ window.parent.isnot(style.visibility, "hidden", "Should not import bad css");
+ }
+}
+
+</script>
+
+<!-- Try loading from a malware javascript URI -->
+<script type="text/javascript" src="http://malware.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"></script>
+
+<!-- Try loading from an unwanted software css URI -->
+<link rel="stylesheet" type="text/css" href="http://unwanted.example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.css"></link>
+
+<!-- Try loading a marked-as-malware css through an @import from a clean URI -->
+<link rel="stylesheet" type="text/css" href="import.css"></link>
+</head>
+
+<body onload="checkLoads()">
+<div id="title"></div>
+<div id="styleCheck">STYLE EVIL</div>
+<div id="styleBad">STYLE BAD</div>
+<div id="styleImport">STYLE IMPORT</div>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/good.js b/toolkit/components/url-classifier/tests/mochitest/good.js
new file mode 100644
index 0000000000..a3c8fcb0c8
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/good.js
@@ -0,0 +1,3 @@
+/* global scriptItem:true */
+
+scriptItem = "loaded whitelisted javascript!";
diff --git a/toolkit/components/url-classifier/tests/mochitest/head.js b/toolkit/components/url-classifier/tests/mochitest/head.js
new file mode 100644
index 0000000000..dca6abe279
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/head.js
@@ -0,0 +1,40 @@
+// Calculate the fullhash and send it to the gethash server.
+function addCompletionToServer(list, url, mochitestUrl) {
+ return new Promise(function (resolve, reject) {
+ var listParam = "list=" + list;
+ var fullhashParam = "fullhash=" + hash(url);
+
+ var xhr = new XMLHttpRequest();
+ xhr.open("PUT", mochitestUrl + "?" + listParam + "&" + fullhashParam, true);
+ xhr.setRequestHeader("Content-Type", "text/plain");
+ xhr.onreadystatechange = function () {
+ if (this.readyState == this.DONE) {
+ resolve();
+ }
+ };
+ xhr.send();
+ });
+}
+
+function hash(str) {
+ var hasher = SpecialPowers.Cc["@mozilla.org/security/hash;1"].createInstance(
+ SpecialPowers.Ci.nsICryptoHash
+ );
+
+ var data = new TextEncoder().encode(str);
+ hasher.init(hasher.SHA256);
+ hasher.update(data, data.length);
+
+ return hasher.finish(true);
+}
+
+var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({ set: p });
+
+function whenDelayedStartupFinished(aWindow, aCallback) {
+ Services.obs.addObserver(function observer(aSubject, aTopic) {
+ if (aWindow == aSubject) {
+ Services.obs.removeObserver(observer, aTopic);
+ setTimeout(aCallback, 0);
+ }
+ }, "browser-delayed-startup-finished");
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/import.css b/toolkit/components/url-classifier/tests/mochitest/import.css
new file mode 100644
index 0000000000..9b86c82169
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/import.css
@@ -0,0 +1,3 @@
+/* malware.example.com is in the malware database. */
+@import url("http://malware.example.com/tests/toolkit/components/url-classifier/tests/mochitest/bad.css");
+#styleImport { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/import2.css b/toolkit/components/url-classifier/tests/mochitest/import2.css
new file mode 100644
index 0000000000..55de698e0c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/import2.css
@@ -0,0 +1,3 @@
+/* malware.mochi.test is in the malware database. */
+@import url("http://malware.mochi.test/tests/toolkit/components/url-classifier/tests/mochitest/bad.css");
+#styleImport { visibility: hidden; }
diff --git a/toolkit/components/url-classifier/tests/mochitest/mochitest.toml b/toolkit/components/url-classifier/tests/mochitest/mochitest.toml
new file mode 100644
index 0000000000..20ebb3bf34
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/mochitest.toml
@@ -0,0 +1,91 @@
+[DEFAULT]
+tags = "condprof"
+support-files = [
+ "classifierCommon.js",
+ "classifierFrame.html",
+ "classifierHelper.js",
+ "cleanWorker.js",
+ "good.js",
+ "head.js",
+ "evil.css",
+ "evil.css^headers^",
+ "evil.js",
+ "evil.js^headers^",
+ "evilWorker.js",
+ "import.css",
+ "import2.css",
+ "raptor.jpg",
+ "track.html",
+ "trackerFrame.html",
+ "trackerFrame.sjs",
+ "trackingRequest.js",
+ "trackingRequest.js^headers^",
+ "unwantedWorker.js",
+ "vp9.webm",
+ "whitelistFrame.html",
+ "workerFrame.html",
+ "ping.sjs",
+ "basic.vtt",
+ "basic.vtt^headers^",
+ "dnt.html",
+ "dnt.sjs",
+ "update.sjs",
+ "bad.css",
+ "bad.css^headers^",
+ "gethash.sjs",
+ "gethashFrame.html",
+ "seek.webm",
+ "cache.sjs",
+ "features.js",
+ "sw_register.html",
+ "sw_unregister.html",
+ "sw_worker.js",
+ "sandboxed.html",
+ "sandboxed.html^headers^",
+]
+skip-if = [
+ "http3",
+ "http2",
+]
+
+["test_annotation_vs_TP.html"]
+
+["test_bug1254766.html"]
+
+["test_cachemiss.html"]
+skip-if = ["verify"]
+
+["test_classifier.html"]
+
+["test_classifier_match.html"]
+
+["test_classifier_worker.html"]
+
+["test_classify_by_default.html"]
+skip-if = [
+ "http3",
+ "http2",
+]
+
+["test_classify_ping.html"]
+skip-if = ["verify && debug && (os == 'win' || os == 'mac')"]
+
+["test_classify_top_sandboxed.html"]
+
+["test_classify_track.html"]
+
+["test_cryptomining.html"]
+
+["test_cryptomining_annotate.html"]
+
+["test_emailtracking.html"]
+
+["test_fingerprinting.html"]
+
+["test_fingerprinting_annotate.html"]
+
+["test_gethash.html"]
+
+["test_socialtracking.html"]
+
+["test_socialtracking_annotate.html"]
diff --git a/toolkit/components/url-classifier/tests/mochitest/ping.sjs b/toolkit/components/url-classifier/tests/mochitest/ping.sjs
new file mode 100644
index 0000000000..09a5f17356
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/ping.sjs
@@ -0,0 +1,15 @@
+function handleRequest(request, response) {
+ var query = {};
+ request.queryString.split("&").forEach(function (val) {
+ var [name, value] = val.split("=");
+ query[name] = unescape(value);
+ });
+
+ if (request.method == "POST") {
+ setState(query.id, "ping");
+ } else {
+ var value = getState(query.id);
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(value);
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/raptor.jpg b/toolkit/components/url-classifier/tests/mochitest/raptor.jpg
new file mode 100644
index 0000000000..243ba9e2d4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/raptor.jpg
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/redirect_tracker.sjs b/toolkit/components/url-classifier/tests/mochitest/redirect_tracker.sjs
new file mode 100644
index 0000000000..563abae5cb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/redirect_tracker.sjs
@@ -0,0 +1,7 @@
+const gURL =
+ "http://trackertest.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js";
+
+function handleRequest(request, response) {
+ response.setStatusLine("1.1", 302, "found");
+ response.setHeader("Location", gURL, false);
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/report.sjs b/toolkit/components/url-classifier/tests/mochitest/report.sjs
new file mode 100644
index 0000000000..b5844eb7a4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/report.sjs
@@ -0,0 +1,71 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const SJS = "report.sjs?";
+const REDIRECT =
+ "mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/" +
+ SJS;
+
+function createBlockedIframePage() {
+ return `<!DOCTYPE HTML>
+ <html>
+ <head>
+ <title></title>
+ </head>
+ <body>
+ <iframe id="phishingFrame" ></iframe>
+ </body>
+ </html>`;
+}
+
+function createPage() {
+ return `<!DOCTYPE HTML>
+ <html>
+ <head>
+ <title>Hello World</title>
+ </head>
+ <body>
+ <script></script>
+ </body>
+ </html>`;
+}
+
+function handleRequest(request, response) {
+ var params = new URLSearchParams(request.queryString);
+ var action = params.get("action");
+
+ if (action === "create-blocked-iframe") {
+ response.setHeader("Cache-Control", "no-cache", false);
+ response.setHeader("Content-Type", "text/html; charset=utf-8", false);
+ response.write(createBlockedIframePage());
+ return;
+ }
+
+ if (action === "redirect") {
+ response.setHeader("Cache-Control", "no-cache", false);
+ response.setHeader("Content-Type", "text/html; charset=utf-8", false);
+ response.write(createPage());
+ return;
+ }
+
+ if (action === "reporturl") {
+ response.setHeader("Cache-Control", "no-cache", false);
+ response.setHeader("Content-Type", "text/html; charset=utf-8", false);
+ response.write(createPage());
+ return;
+ }
+
+ if (action === "create-blocked-redirect") {
+ params.delete("action");
+ params.append("action", "redirect");
+ response.setStatusLine("1.1", 302, "found");
+ response.setHeader(
+ "Location",
+ "https://" + REDIRECT + params.toString(),
+ false
+ );
+ return;
+ }
+
+ response.write("I don't know action " + action);
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/sandboxed.html b/toolkit/components/url-classifier/tests/mochitest/sandboxed.html
new file mode 100644
index 0000000000..3eb8870edd
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/sandboxed.html
@@ -0,0 +1,8 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<body>
+<script src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js">
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/sandboxed.html^headers^ b/toolkit/components/url-classifier/tests/mochitest/sandboxed.html^headers^
new file mode 100644
index 0000000000..4705ce9ded
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/sandboxed.html^headers^
@@ -0,0 +1 @@
+Content-Security-Policy: sandbox allow-scripts;
diff --git a/toolkit/components/url-classifier/tests/mochitest/seek.webm b/toolkit/components/url-classifier/tests/mochitest/seek.webm
new file mode 100644
index 0000000000..72b0297233
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/seek.webm
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/sw_register.html b/toolkit/components/url-classifier/tests/mochitest/sw_register.html
new file mode 100644
index 0000000000..2a536128fa
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/sw_register.html
@@ -0,0 +1,33 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+<meta charset="utf-8">
+<script>
+function waitForState(worker, state, context) {
+ return new Promise(resolve => {
+ function onStateChange() {
+ if (worker.state === state) {
+ worker.removeEventListener("statechange", onStateChange);
+ resolve(context);
+ }
+ }
+
+ // First add an event listener, so we won't miss any change that happens
+ // before we check the current state.
+ worker.addEventListener("statechange", onStateChange);
+
+ // Now check if the worker is already in the desired state.
+ onStateChange();
+ });
+}
+
+(async function () {
+ dump("[Dimi]register sw...\n");
+ let reg = await navigator.serviceWorker.register("sw_worker.js", {scope: "."});
+ await waitForState(reg.installing, "activated", reg);
+ window.parent.postMessage({status: "registrationdone"}, "*");
+})();
+
+</script>
+</head>
+<html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/sw_unregister.html b/toolkit/components/url-classifier/tests/mochitest/sw_unregister.html
new file mode 100644
index 0000000000..b94fa0baac
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/sw_unregister.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<script>
+ navigator.serviceWorker.getRegistration(".").then(function(registration) {
+ registration.unregister().then(function(success) {
+ if (success) {
+ window.opener.postMessage({status: "unregistrationdone"}, "*");
+ }
+ }, function(e) {
+ dump("Unregistering the SW failed with " + e + "\n");
+ });
+ });
+</script>
diff --git a/toolkit/components/url-classifier/tests/mochitest/sw_worker.js b/toolkit/components/url-classifier/tests/mochitest/sw_worker.js
new file mode 100644
index 0000000000..bad16608b4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/sw_worker.js
@@ -0,0 +1,10 @@
+self.addEventListener("fetch", function (event) {
+ let sep = "synth.html?";
+ let idx = event.request.url.indexOf(sep);
+ if (idx > 0) {
+ let url = event.request.url.substring(idx + sep.length);
+ event.respondWith(fetch(url, { credentials: "include" }));
+ } else {
+ event.respondWith(fetch(event.request));
+ }
+});
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_advisory_link.html b/toolkit/components/url-classifier/tests/mochitest/test_advisory_link.html
new file mode 100644
index 0000000000..5fa49f628e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_advisory_link.html
@@ -0,0 +1,138 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test advisory link (Bug #1366384)</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+
+var mainWindow = window.browsingContext.topChromeWindow;
+
+var testDatas = [
+ { url: "itisaphishingsite.org/phishing.html",
+ list: "mochi1-phish-simple",
+ provider: "google",
+ },
+
+ { url: "fakeitisaphishingsite.org/phishing.html",
+ list: "fake1-phish-simple",
+ provider: "mozilla",
+ },
+
+ { url: "phishing.example.com/test.html",
+ list: "mochi2-phish-simple",
+ provider: "google4",
+ },
+];
+
+let pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+
+function addUrlToDB(list, url) {
+ let testData = [{ db: list, url}];
+
+ return classifierHelper.addUrlToDB(testData)
+ .catch(function(err) {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+}
+
+function setupTestData(data) {
+ let promises = [];
+ let providerList = "browser.safebrowsing.provider." + data.provider + ".lists";
+ promises.push(pushPrefs([providerList, data.list]));
+
+ let activeTablePref = "urlclassifier.phishTable";
+ let activeTable = SpecialPowers.getCharPref(activeTablePref);
+ activeTable += "," + data.list;
+ promises.push(pushPrefs([activeTablePref, activeTable]));
+
+ promises.push(addUrlToDB(data.list, data.url));
+ return Promise.all(promises);
+}
+
+function testOnWindow(aTestData) {
+ return new Promise(resolve => {
+ let win = mainWindow.OpenBrowserWindow();
+
+ (async function() {
+ await TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win);
+
+ let browser = win.gBrowser.selectedBrowser;
+ BrowserTestUtils.startLoadingURIString(browser, aTestData.url);
+ await BrowserTestUtils.waitForContentEvent(browser, "DOMContentLoaded");
+
+ let doc = win.gBrowser.contentDocument;
+
+ // This test works on a document which uses Fluent.
+ // Fluent localization may finish later than DOMContentLoaded,
+ // so let's wait for `document.l10n.ready` promise to resolve.
+ await doc.l10n.ready;
+ let advisoryEl = doc.getElementById("advisory_provider");
+ if (aTestData.provider != "google" && aTestData.provider != "google4") {
+ ok(!advisoryEl, "Advisory should not be shown");
+ } else {
+ ok(advisoryEl, "Advisory element should exist");
+ let expectedUrl =
+ SpecialPowers.getCharPref("browser.safebrowsing.provider." +
+ aTestData.provider +
+ ".advisoryURL");
+ is(advisoryEl.href, expectedUrl, "Correct advisory url");
+ let expectedText =
+ SpecialPowers.getCharPref("browser.safebrowsing.provider." +
+ aTestData.provider +
+ ".advisoryName");
+ is(advisoryEl.text, expectedText, "correct advisory text");
+ }
+
+ win.close();
+ resolve();
+ })();
+ });
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["browser.safebrowsing.phishing.enabled", true]]},
+ test);
+
+function test() {
+ (async function() {
+ await classifierHelper.waitForInit();
+
+ for (let testData of testDatas) {
+ await setupTestData(testData);
+ await testOnWindow(testData);
+ await classifierHelper._cleanup();
+ }
+
+ SimpleTest.finish();
+ })();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html b/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html
new file mode 100644
index 0000000000..bb512e7cdc
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_allowlisted_annotations.html
@@ -0,0 +1,60 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the URI Classifier</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+
+// Add https://allowlisted.example.com to the permissions manager
+SpecialPowers.addPermission("trackingprotection",
+ Ci.nsIPermissionManager.ALLOW_ACTION,
+ { url: "https://allowlisted.example.com" });
+
+async function clearPermissions() {
+ await SpecialPowers.removePermission("trackingprotection",
+ { url: "https://allowlisted.example.com" });
+ ok(!await SpecialPowers.testPermission("trackingprotection",
+ Ci.nsIPermissionManager.ALLOW_ACTION,
+ { url: "https://allowlisted.example.com" }));
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["urlclassifier.trackingTable", "moztest-track-simple"],
+ ["privacy.trackingprotection.enabled", true],
+ ["privacy.trackingprotection.testing.report_blocked_node", true],
+ ["channelclassifier.allowlist_example", true],
+ ["dom.security.skip_remote_script_assertion_in_system_priv_context", true]]},
+ test);
+
+function test() {
+ SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
+ UrlClassifierTestUtils.addTestTrackers().then(() => {
+ document.getElementById("testFrame").src = "allowlistAnnotatedFrame.html";
+ });
+}
+
+// Expected finish() call is in "allowlistAnnotatedFrame.html".
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_annotation_vs_TP.html b/toolkit/components/url-classifier/tests/mochitest/test_annotation_vs_TP.html
new file mode 100644
index 0000000000..ad8e98005b
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_annotation_vs_TP.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the relationship between annotation vs TP</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="features.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+runTests(SpecialPowers.Ci.nsIClassifiedChannel.CLASSIFIED_TRACKING,
+ [
+ ["privacy.trackingprotection.enabled", true],
+ ["privacy.trackingprotection.annotate_channels", true],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistTables", ""],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistHosts", ""],
+ ["privacy.trackingprotection.fingerprinting.enabled", false],
+ ["urlclassifier.features.cryptomining.annotate.blacklistTables", ""],
+ ["urlclassifier.features.cryptomining.annotate.blacklistHosts", ""],
+ ["privacy.trackingprotection.cryptomining.enabled", false],
+ ["urlclassifier.features.socialtracking.annotate.blacklistTables", ""],
+ ["urlclassifier.features.socialtracking.annotate.blacklistHosts", ""],
+ ],
+ true /* a tracking resource */);
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html b/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html
new file mode 100644
index 0000000000..2e528a7242
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_bug1254766.html
@@ -0,0 +1,265 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1272239 - Test gethash.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script src="head.js"></script>
+<script class="testbody" type="text/javascript">
+// Test tables and the hosts seeded into each of them.
+const MALWARE_LIST = "mochi-malware-simple";
+const MALWARE_HOST1 = "malware.example.com/";
+const MALWARE_HOST2 = "test1.example.com/";
+
+const UNWANTED_LIST = "mochi-unwanted-simple";
+const UNWANTED_HOST1 = "unwanted.example.com/";
+const UNWANTED_HOST2 = "test2.example.com/";
+
+// Hosts added to the DB only to force an update without touching the
+// hosts under test (see testUpdateNotClearCompletions).
+const UNUSED_MALWARE_HOST = "unused.malware.com/";
+const UNUSED_UNWANTED_HOST = "unused.unwanted.com/";
+
+// gethash.sjs both serves full hashes and counts gethash requests.
+const GETHASH_URL =
+  "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/gethash.sjs";
+
+// Counters read back from gethash.sjs; comparing pre/cur tells us whether
+// the last frame load triggered a gethash request.
+var gPreGethashCounter = 0;
+var gCurGethashCounter = 0;
+
+// NOTE(review): expectLoad appears unused in this file — gethashFrame.html
+// may read it from its parent; verify before removing.
+var expectLoad = false;
+
+function loadTestFrame() {
+ return new Promise(function(resolve, reject) {
+ var iframe = document.createElement("iframe");
+ iframe.setAttribute("src", "gethashFrame.html");
+ document.body.appendChild(iframe);
+
+ iframe.onload = function() {
+ document.body.removeChild(iframe);
+ resolve();
+ };
+ }).then(getGethashCounter);
+}
+
+function getGethashCounter() {
+ return new Promise(function(resolve, reject) {
+ var xhr = new XMLHttpRequest;
+ xhr.open("PUT", GETHASH_URL + "?gethashcount");
+ xhr.setRequestHeader("Content-Type", "text/plain");
+ xhr.onreadystatechange = function() {
+ if (this.readyState == this.DONE) {
+ gPreGethashCounter = gCurGethashCounter;
+ gCurGethashCounter = parseInt(xhr.response);
+ resolve();
+ }
+ };
+ xhr.send();
+ });
+}
+
+// Setup: allow the classifier to send gethash requests for the test
+// databases, then compute the full hash for each test URL and store those
+// hashes on the gethash server (gethash.sjs).
+async function setup() {
+  await classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);
+
+  return Promise.all([
+    addCompletionToServer(MALWARE_LIST, MALWARE_HOST1, GETHASH_URL),
+    addCompletionToServer(MALWARE_LIST, MALWARE_HOST2, GETHASH_URL),
+    addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST1, GETHASH_URL),
+    addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST2, GETHASH_URL),
+  ]);
+}
+
+// Reset function in helper try to simulate the behavior we restart firefox
+function reset() {
+ return classifierHelper.resetDatabase()
+ .catch(err => {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+}
+
+// Add two hosts that are never looked up; this merely forces an update
+// cycle (which clears the cache) without touching the hosts under test.
+function updateUnusedUrl() {
+  var testData = [
+    { url: UNUSED_MALWARE_HOST, db: MALWARE_LIST },
+    { url: UNUSED_UNWANTED_HOST, db: UNWANTED_LIST },
+  ];
+
+  return classifierHelper.addUrlToDB(testData)
+    .catch(err => {
+      ok(false, "Couldn't update classifier. Error code: " + err);
+      // Abort test.
+      SimpleTest.finish();
+    });
+}
+
+// Add the test hosts as 4-byte prefixes (a lookup then needs gethash).
+function addPrefixToDB() {
+  return update(true);
+}
+
+// Add the test hosts as full 32-byte completions (no gethash needed).
+function addCompletionToDB() {
+  return update(false);
+}
+
+function update(prefix = false) {
+ var length = prefix ? 4 : 32;
+ var testData = [
+ { url: MALWARE_HOST1, db: MALWARE_LIST, len: length },
+ { url: MALWARE_HOST2, db: MALWARE_LIST, len: length },
+ { url: UNWANTED_HOST1, db: UNWANTED_LIST, len: length },
+ { url: UNWANTED_HOST2, db: UNWANTED_LIST, len: length },
+ ];
+
+ return classifierHelper.addUrlToDB(testData)
+ .catch(err => {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+}
+
+// This testcase is to make sure gethash works:
+// 1. Add prefixes to DB.
+// 2. Load the test frame containing the malware & unwanted urls; those urls
+//    should be blocked.
+// 3. The second step should also trigger a gethash request, since the
+//    completions are in neither the cache nor the DB.
+// 4. Load the test frame again; the completions are now cached, so no
+//    gethash request should be triggered.
+function testGethash() {
+  return Promise.resolve()
+    .then(addPrefixToDB)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered.");
+})
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(reset);
+}
+
+// This testcase is to make sure completions delivered via update work:
+// 1. Add completions to DB.
+// 2. Load the test frame; since the completions are stored in the DB, no
+//    gethash request should be triggered.
+function testUpdate() {
+  return Promise.resolve()
+    .then(addCompletionToDB)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(reset);
+}
+
+// This testcase is to make sure an update request will not clear completions in DB:
+// 1. Add completions to DB.
+// 2. Load the test frame to confirm the completions are in the database; no
+//    gethash request should be triggered.
+// 3. Trigger an update: the cache is cleared, but completions in the DB
+//    should remain.
+// 4. Load the test frame again; the completions are still in the DB, so no
+//    gethash request should be triggered.
+function testUpdateNotClearCompletions() {
+  return Promise.resolve()
+    .then(addCompletionToDB)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(updateUnusedUrl)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(reset);
+}
+
+// This testcase makes sure completions stored in the DB load properly after a restart:
+// 1. Add completions to DB.
+// 2. Simulate a Firefox restart by calling reloadDatabase.
+// 3. Load the test frame; the completions should be loaded from the DB, so
+//    no gethash request should be triggered.
+function testUpdateCompletionsAfterReload() {
+  return Promise.resolve()
+    .then(addCompletionToDB)
+    .then(classifierHelper.reloadDatabase)
+    // Call getTables to ensure the DB is fully reloaded before we load the test
+    // frame.
+    .then(classifierHelper.getTables)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(reset);
+}
+
+// This testcase is to make sure the cache is cleared after restarting:
+// 1. Add prefixes to DB.
+// 2. Load the test frame; this triggers a gethash request and the
+//    completions get cached.
+// 3. Load the test frame again; no gethash should be triggered because of
+//    the cache.
+// 4. Simulate a Firefox restart by calling reloadDatabase.
+// 5. Load the test frame again; the cache was cleared, so a gethash request
+//    should be triggered.
+function testGethashCompletionsAfterReload() {
+  return Promise.resolve()
+    .then(addPrefixToDB)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered.");
+})
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is not triggered.");
+})
+    .then(classifierHelper.reloadDatabase)
+    // Call getTables to ensure the DB is fully reloaded before we load the test
+    // frame.
+    .then(classifierHelper.getTables)
+    .then(loadTestFrame)
+    .then(() => {
+      ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered.");
+})
+    .then(reset);
+}
+
+function runTest() {
+ Promise.resolve()
+ .then(classifierHelper.waitForInit)
+ .then(setup)
+ .then(testGethash)
+ .then(testUpdate)
+ .then(testUpdateNotClearCompletions)
+ .then(testUpdateCompletionsAfterReload)
+ .then(testGethashCompletionsAfterReload)
+ .then(function() {
+ SimpleTest.finish();
+ }).catch(function(e) {
+ ok(false, "Some test failed with error " + e);
+ SimpleTest.finish();
+ });
+}
+
+SimpleTest.waitForExplicitFinish();
+
+// 'network.predictor.enabled' is disabled because if another testcase loads
+// evil.js, evil.css ...etc. resources, we might load them directly from the
+// cache and bypass the classifier check.
+SpecialPowers.pushPrefEnv({"set": [
+  ["browser.safebrowsing.malware.enabled", true],
+  ["urlclassifier.malwareTable", "mochi-malware-simple,mochi-unwanted-simple"],
+  ["network.predictor.enabled", false],
+  ["urlclassifier.gethash.timeout_ms", 30000],
+]}, runTest);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_cachemiss.html b/toolkit/components/url-classifier/tests/mochitest/test_cachemiss.html
new file mode 100644
index 0000000000..2af0285884
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_cachemiss.html
@@ -0,0 +1,167 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1272239 - Test gethash.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script src="head.js"></script>
+<script class="testbody" type="text/javascript">
+// Test tables and the single host seeded into each of them.
+const MALWARE_LIST = "test-malware-simple";
+const MALWARE_HOST = "malware.example.com/";
+
+const UNWANTED_LIST = "test-unwanted-simple";
+const UNWANTED_HOST = "unwanted.example.com/";
+
+// cache.sjs serves gethash completions and counts gethash requests.
+const GETHASH_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/cache.sjs";
+
+// Read by the loaded frame: whether the test resources are expected to load.
+var shouldLoad = false;
+
+// Counters read back from cache.sjs; comparing pre/cur tells us whether the
+// last frame load triggered a gethash request.
+var gPreGethashCounter = 0;
+var gCurGethashCounter = 0;
+
+function loadTestFrame() {
+ return new Promise(function(resolve, reject) {
+ var iframe = document.createElement("iframe");
+ iframe.setAttribute("src", "gethashFrame.html");
+ document.body.appendChild(iframe);
+
+ iframe.onload = function() {
+ document.body.removeChild(iframe);
+ resolve();
+ };
+ }).then(getGethashCounter);
+}
+
+function getGethashCounter() {
+ return new Promise(function(resolve, reject) {
+ var xhr = new XMLHttpRequest;
+ xhr.open("PUT", GETHASH_URL + "?gethashcount");
+ xhr.setRequestHeader("Content-Type", "text/plain");
+ xhr.onreadystatechange = function() {
+ if (this.readyState == this.DONE) {
+ gPreGethashCounter = gCurGethashCounter;
+ gCurGethashCounter = parseInt(xhr.response);
+ resolve();
+ }
+ };
+ xhr.send();
+ });
+}
+
+// Add a 4-byte prefix for `url` to the local database `list`, so that
+// accessing the url triggers a gethash request. Failure aborts the test.
+function addPrefixToDB(list, url) {
+  var testData = [{ db: list, url, len: 4 }];
+
+  return classifierHelper.addUrlToDB(testData)
+    .catch(function(err) {
+      ok(false, "Couldn't update classifier. Error code: " + err);
+      // Abort test.
+      SimpleTest.finish();
+    });
+}
+
+// Manually reset the DB to make sure the next test won't be affected by the
+// cache populated here.
+function reset() {
+  return classifierHelper.resetDatabase();
+}
+
+// This test has to come before testPositiveCache to ensure gethash server doesn't
+// contain completions.
+function testNegativeCache() {
+ SpecialPowers.DOMWindowUtils.clearSharedStyleSheetCache();
+ shouldLoad = true;
+
+ async function setup() {
+ await classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);
+
+ // Only add prefix to database. not server, so gethash will not return
+ // result.
+ return Promise.all([
+ addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
+ addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST),
+ ]);
+ }
+
+ return Promise.resolve()
+ .then(setup)
+ .then(() => loadTestFrame())
+ .then(() => {
+ ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered.");
+})
+ // Second load should not trigger gethash request because cache.
+ .then(() => loadTestFrame())
+ .then(() => {
+ ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is nottriggered.");
+})
+ .then(reset);
+}
+
+function testPositiveCache() {
+ SpecialPowers.DOMWindowUtils.clearSharedStyleSheetCache();
+ shouldLoad = false;
+
+ async function setup() {
+ await classifierHelper.allowCompletion([MALWARE_LIST, UNWANTED_LIST], GETHASH_URL);
+
+ return Promise.all([
+ addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
+ addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST),
+ addCompletionToServer(MALWARE_LIST, MALWARE_HOST, GETHASH_URL),
+ addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST, GETHASH_URL),
+ ]);
+ }
+
+ return Promise.resolve()
+ .then(setup)
+ .then(() => loadTestFrame())
+ .then(() => {
+ ok(gCurGethashCounter > gPreGethashCounter, "Gethash request is triggered.");
+})
+ // Second load should not trigger gethash request because cache.
+ .then(() => loadTestFrame())
+ .then(() => {
+ ok(gCurGethashCounter == gPreGethashCounter, "Gethash request is nottriggered.");
+})
+ .then(reset);
+}
+
+// Run the negative-cache test first (see its comment), then the
+// positive-cache test; report any error and finish either way.
+function runTest() {
+  Promise.resolve()
+    // This test's resources get blocked when gethash returns successfully.
+    .then(classifierHelper.waitForInit)
+    .then(testNegativeCache)
+    .then(testPositiveCache)
+    .then(function() {
+      SimpleTest.finish();
+    }).catch(function(e) {
+      ok(false, "Some test failed with error " + e);
+      SimpleTest.finish();
+    });
+}
+
+SimpleTest.waitForExplicitFinish();
+
+// 'network.predictor.enabled' is disabled because if another testcase loads
+// evil.js, evil.css ...etc. resources, we might load them directly from the
+// cache and bypass the classifier check.
+SpecialPowers.pushPrefEnv({"set": [
+  ["browser.safebrowsing.malware.enabled", true],
+  ["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple"],
+  ["network.predictor.enabled", false],
+  ["urlclassifier.gethash.timeout_ms", 30000],
+]}, runTest);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html b/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html
new file mode 100644
index 0000000000..44a0c92549
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classified_annotations.html
@@ -0,0 +1,171 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the URI Classifier</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+// Content page whose sub-resources exercise the tracking annotations.
+var contentPage = "http://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedFrame.html";
+
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+  "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+  "resource://testing-common/TestUtils.sys.mjs"
+);
+
+// Open a new browser window, load contentPage in it, and resolve with the
+// window once the page dispatches its "OnLoadComplete" event (whose detail
+// is the JSON result object passed to checkLoads). The listener nesting is
+// order-sensitive: load -> delayed-startup -> DOMContentLoaded ->
+// OnLoadComplete.
+function testOnWindow() {
+  return new Promise((resolve, reject) => {
+    let win = mainWindow.OpenBrowserWindow();
+    win.addEventListener("load", function() {
+      TestUtils.topicObserved("browser-delayed-startup-finished",
+                              subject => subject == win).then(() => {
+        win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+          // Re-issue the load until the intended page is actually showing
+          // (the first DOMContentLoaded may be for about:blank etc.).
+          if (win.content.location.href != contentPage) {
+            win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+              triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+            });
+            return;
+          }
+          win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+          // The page signals completion with a custom event carrying the
+          // per-resource load results as JSON.
+          win.content.addEventListener("OnLoadComplete", function innerLoad2(e) {
+            win.content.removeEventListener("OnLoadComplete", innerLoad2);
+            SimpleTest.executeSoon(function() {
+              checkLoads(win, JSON.parse(e.detail));
+              resolve(win);
+            });
+          }, false, true);
+        }, true);
+        SimpleTest.executeSoon(function() {
+          win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+            triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+          });
+        });
+      });
+    }, {capture: true, once: true});
+  });
+}
+
+// IDs of the elements in classifiedAnnotatedFrame.html that reference
+// tracking resources; each is expected to be blocked and annotated.
+var badids = [
+  "badscript1",
+  "badscript2",
+  "badimage1",
+  "badimage2",
+  "badframe1",
+  "badframe2",
+  "badmedia1",
+  "badcss",
+];
+
+function checkLoads(aWindow, aData) {
+ var win = aWindow.content;
+
+ is(aData.scriptItem1, "untouched", "Should not load tracking javascript");
+ is(aData.scriptItem2, "untouched", "Should not load tracking javascript (2)");
+
+ is(aData.imageItem1, "untouched", "Should not load tracking images");
+ is(aData.imageItem2, "untouched", "Should not load tracking images (2)");
+
+ is(aData.frameItem1, "untouched", "Should not load tracking iframes");
+ is(aData.frameItem2, "untouched", "Should not load tracking iframes (2)");
+ is(aData.mediaItem1, "error", "Should not load tracking videos");
+ is(aData.xhrItem, "failed", "Should not load tracking XHRs");
+ is(aData.fetchItem, "error", "Should not fetch from tracking URLs");
+
+ var elt = win.document.getElementById("styleCheck");
+ var style = win.document.defaultView.getComputedStyle(elt);
+ isnot(style.visibility, "hidden", "Should not load tracking css");
+
+ is(win.document.blockedNodeByClassifierCount, badids.length,
+ "Should identify all tracking elements");
+
+ var blockedNodes = win.document.blockedNodesByClassifier;
+
+ // Make sure that every node in blockedNodes exists in the tree
+ // (that may not always be the case but do not expect any nodes to disappear
+ // from the tree here)
+ var allNodeMatch = true;
+ for (let i = 0; i < blockedNodes.length; i++) {
+ let nodeMatch = false;
+ for (let j = 0; j < badids.length && !nodeMatch; j++) {
+ nodeMatch = nodeMatch ||
+ (blockedNodes[i] == win.document.getElementById(badids[j]));
+ }
+
+ allNodeMatch = allNodeMatch && nodeMatch;
+ }
+ ok(allNodeMatch, "All annotated nodes are expected in the tree");
+
+ // Make sure that every node with a badid (see badids) is found in the
+ // blockedNodes. This tells us if we are neglecting to annotate
+ // some nodes
+ allNodeMatch = true;
+ for (let j = 0; j < badids.length; j++) {
+ let nodeMatch = false;
+ for (let i = 0; i < blockedNodes.length && !nodeMatch; i++) {
+ nodeMatch = nodeMatch ||
+ (blockedNodes[i] == win.document.getElementById(badids[j]));
+ }
+
+ if (!nodeMatch) {
+ console.log(badids[j] + " was not found in blockedNodes");
+ }
+ allNodeMatch = allNodeMatch && nodeMatch;
+ }
+ ok(allNodeMatch, "All tracking nodes are expected to be annotated as such");
+
+ // End (parent) test.
+}
+
+// Clear the prefs set by test() and remove the test tracker entries.
+function cleanup() {
+  SpecialPowers.clearUserPref("privacy.trackingprotection.enabled");
+  SpecialPowers.clearUserPref("channelclassifier.allowlist_example");
+  SpecialPowers.clearUserPref("privacy.trackingprotection.testing.report_blocked_node");
+
+  UrlClassifierTestUtils.cleanupTestTrackers();
+}
+
+// Point the tracking classifier at the simple test table, then run.
+SpecialPowers.pushPrefEnv(
+  {"set": [["urlclassifier.trackingTable", "moztest-track-simple"]]},
+  test);
+
+async function test() {
+ SimpleTest.registerCleanupFunction(cleanup);
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ await SpecialPowers.setBoolPref("privacy.trackingprotection.enabled", true);
+ // Make sure chrome:// URIs are processed. This does not white-list
+ // any URIs unless 'https://allowlisted.example.com' is added in the
+ // permission manager (see test_allowlisted_annotations.html)
+ await SpecialPowers.setBoolPref("channelclassifier.allowlist_example", true);
+ await SpecialPowers.setBoolPref("privacy.trackingprotection.testing.report_blocked_node", true);
+
+ await testOnWindow().then(function(aWindow) {
+ aWindow.close();
+ });
+
+ SimpleTest.finish();
+}
+
+// Expected finish() call is in "classifiedAnnotatedFrame.html".
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier.html
new file mode 100644
index 0000000000..787b70798a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classifier.html
@@ -0,0 +1,141 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the URI Classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+// NOTE(review): firstLoad appears unused in this file — possibly read by
+// classifierFrame.html; verify before removing.
+var firstLoad = true;
+
+// Add some URLs to the malware database: one entry per test table flavor.
+var testData = [
+  { url: "malware.mochi.test/",
+    db: "test-malware-simple",
+  },
+  { url: "unwanted.mochi.test/",
+    db: "test-unwanted-simple",
+  },
+  { url: "blocked.mochi.test/",
+    db: "test-block-simple",
+  },
+  { url: "harmful.mochi.test/",
+    db: "test-harmful-simple",
+  },
+  { url: "phish.mochi.test/firefox/its-a-trap.html",
+    db: "test-phish-simple",
+  },
+];
+
+const Cc = SpecialPowers.Cc;
+const Ci = SpecialPowers.Ci;
+const Cr = SpecialPowers.Cr;
+
+// Each entry: the url to classify, the table expected from the local lookup
+// ("" for no match), and the nsresult expected from async classification.
+var testURLs = [
+  { url: "http://mochi.test",
+    table: "",
+    result: Cr.NS_OK,
+  },
+  { url: "http://malware.mochi.test",
+    table: "test-malware-simple",
+    result: Cr.NS_ERROR_MALWARE_URI,
+  },
+  { url: "http://unwanted.mochi.test",
+    table: "test-unwanted-simple",
+    result: Cr.NS_ERROR_UNWANTED_URI,
+  },
+  { url: "http://phish.mochi.test/firefox/its-a-trap.html",
+    table: "test-phish-simple",
+    result: Cr.NS_ERROR_PHISHING_URI,
+  },
+  { url: "http://harmful.mochi.test",
+    table: "test-harmful-simple",
+    result: Cr.NS_ERROR_HARMFUL_URI,
+  },
+  { url: "http://blocked.mochi.test",
+    table: "test-block-simple",
+    result: Cr.NS_ERROR_BLOCKED_URI,
+  },
+];
+
+// Navigate the pre-existing iframe to the test page that performs the loads.
+function loadTestFrame() {
+  document.getElementById("testFrame").src = "classifierFrame.html";
+}
+
+// Expected finish() call is in "classifierFrame.html".
+SimpleTest.waitForExplicitFinish();
+
+// Called after the DB update succeeds; (re)enables malware classification.
+function updateSuccess() {
+  return SpecialPowers.pushPrefEnv(
+    {"set": [["browser.safebrowsing.malware.enabled", true]]});
+}
+
+// Called when the DB update fails; reports the failure and aborts.
+function updateError(errorCode) {
+  ok(false, "Couldn't update classifier. Error code: " + errorCode);
+  // Abort test.
+  SimpleTest.finish();
+}
+
+function testService() {
+ return new Promise(resolve => {
+ function runNextTest() {
+ if (!testURLs.length) {
+ resolve();
+ return;
+ }
+ let test = testURLs.shift();
+ let tables = "test-malware-simple,test-unwanted-simple,test-phish-simple,test-block-simple,test-harmful-simple";
+ let uri = SpecialPowers.Services.io.newURI(test.url);
+ let prin = SpecialPowers.Services.scriptSecurityManager.createContentPrincipal(uri, {});
+ SpecialPowers.doUrlClassify(prin, function(errorCode) {
+ is(errorCode, test.result,
+ `Successful asynchronous classification of ${test.url}`);
+ SpecialPowers.doUrlClassifyLocal(uri, tables, function(results) {
+ if (!results.length) {
+ is(test.table, "",
+ `Successful asynchronous local classification of ${test.url}`);
+ } else {
+ let result = results[0].QueryInterface(Ci.nsIUrlClassifierFeatureResult);
+ is(result.list, test.table,
+ `Successful asynchronous local classification of ${test.url}`);
+ }
+ runNextTest();
+ });
+ });
+ }
+ runNextTest(resolve);
+ });
+}
+
+// Enable all the test tables and start the chain: init classifier, seed the
+// DB, enable classification, run the service checks, then load the frame.
+// NOTE(review): if addUrlToDB rejects, updateError() calls
+// SimpleTest.finish() but the chain still continues into testService;
+// consider rethrowing from the catch handler — verify intended behavior.
+SpecialPowers.pushPrefEnv(
+  {"set": [["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple,test-harmful-simple"],
+           ["urlclassifier.blockedTable", "test-block-simple"],
+           ["urlclassifier.phishTable", "test-phish-simple"],
+           ["browser.safebrowsing.phishing.enabled", true],
+           ["browser.safebrowsing.malware.enabled", true],
+           ["browser.safebrowsing.blockedURIs.enabled", true],
+           ["browser.safebrowsing.debug", true]]},
+  function() {
+    classifierHelper.waitForInit()
+      .then(() => classifierHelper.addUrlToDB(testData))
+      .then(updateSuccess)
+      .catch(err => {
+        updateError(err);
+      })
+      .then(testService)
+      .then(loadTestFrame);
+  });
+
+</script>
+
+</pre>
+<iframe id="testFrame" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html
new file mode 100644
index 0000000000..ea16c14e74
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref.html
@@ -0,0 +1,157 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1281083 - Changing the urlclassifier.*Table prefs doesn't take effect before the next browser restart.</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script src="head.js"></script>
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+// Page containing the "badscript" element used by checkLoads below.
+var contentPage = "http://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/bug_1281083.html";
+
+const testTable = "moz-track-digest256";
+const UPDATE_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/update.sjs";
+
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+
+const {TestUtils} = ChromeUtils.importESModule(
+  "resource://testing-common/TestUtils.sys.mjs"
+);
+
+// One-shot timer reused by waitForUpdateSuccess's polling loop.
+var timer = Cc["@mozilla.org/timer;1"]
+            .createInstance(Ci.nsITimer);
+
+// If the default preference contains the table we want to test,
+// we should change the test table to a different one.
+var trackingTables = SpecialPowers.getCharPref("urlclassifier.trackingTable").split(",");
+ok(!trackingTables.includes(testTable), "test table should not be in the preference");
+
+var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
+                  getService(Ci.nsIUrlListManager);
+
+// The table is not yet registered with any provider, so its gethash url
+// must be empty at this point.
+is(listmanager.getGethashUrl(testTable), "",
+   "gethash url for test table should be empty before setting to preference");
+
+function checkLoads(aWindow, aBlocked) {
+ var win = aWindow.content;
+ is(win.document.getElementById("badscript").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking javascript");
+}
+
+// Open a new browser window, load contentPage in it, and resolve with the
+// window once the page's load event fires. The listener nesting is
+// order-sensitive: load -> delayed-startup -> DOMContentLoaded -> load.
+function testOnWindow() {
+  return new Promise((resolve, reject) => {
+    let win = mainWindow.OpenBrowserWindow();
+    win.addEventListener("load", function() {
+      TestUtils.topicObserved("browser-delayed-startup-finished",
+                              subject => subject == win).then(() => {
+        win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+          // Re-issue the load until the intended page is actually showing
+          // (the first DOMContentLoaded may be for about:blank etc.).
+          if (win.content.location.href != contentPage) {
+            win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+              triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+            });
+            return;
+          }
+          win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+          win.content.addEventListener("load", function innerLoad2(e) {
+            win.content.removeEventListener("load", innerLoad2);
+            SimpleTest.executeSoon(function() {
+              resolve(win);
+            });
+          }, false, true);
+        }, true);
+        SimpleTest.executeSoon(function() {
+          win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+            triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+          });
+        });
+      });
+    }, {capture: true, once: true});
+  });
+}
+
+// Verify the gethash url picked up the new pref, then force an update of
+// the test table and resolve once the update has completed.
+function setup() {
+  return new Promise(function(resolve, reject) {
+    // The gethash url of the test table "moz-track-digest256" should be
+    // updated after setting the preference.
+    var url = listmanager.getGethashUrl(testTable);
+    var expected = SpecialPowers.getCharPref("browser.safebrowsing.provider.mozilla.gethashURL");
+
+    is(url, expected, testTable + " matches its gethash url");
+
+    // Trigger update
+    listmanager.disableUpdate(testTable);
+    listmanager.enableUpdate(testTable);
+    listmanager.maybeToggleUpdateChecking();
+
+    // We wait until "nextupdatetime" has been set as a signal that the
+    // update is complete.
+    waitForUpdateSuccess(function() {
+      resolve();
+    });
+  });
+}
+
+// Poll the provider's nextupdatetime pref every 10ms. The pref was seeded
+// to "1" (see the pushPrefEnv at the bottom of this file) to force an
+// update; once an update completes the provider stores a real timestamp,
+// so any value other than "1" means the update finished.
+function waitForUpdateSuccess(callback) {
+  let nextupdatetime =
+    SpecialPowers.getCharPref("browser.safebrowsing.provider.mozilla.nextupdatetime");
+
+  if (nextupdatetime !== "1") {
+    callback();
+    return;
+  }
+
+  timer.initWithCallback(function() {
+    waitForUpdateSuccess(callback);
+  }, 10, Ci.nsITimer.TYPE_ONE_SHOT);
+}
+
+async function runTest() {
+  // Seed the update server with a completion for bug1281083.example.com
+  // (the host loaded by bug_1281083.html), so it gets blocked after the
+  // update. Since this table is only used by this bug's test it won't
+  // affect other testcases.
+  await addCompletionToServer(testTable, "bug1281083.example.com/", UPDATE_URL);
+
+  /**
+   * In this test we try to modify only the urlclassifier.*Table preference
+   * to see if a url specified in the table will be blocked after update.
+   */
+  await SpecialPowers.pushPrefEnv(
+    {"set": [["urlclassifier.trackingTable", testTable]]});
+
+  await setup();
+
+  await testOnWindow().then(function(aWindow) {
+    checkLoads(aWindow, true);
+    aWindow.close();
+  });
+
+  SimpleTest.finish();
+}
+
+// Set nextupdatetime to 1 to trigger an update (see waitForUpdateSuccess).
+SpecialPowers.pushPrefEnv(
+  {"set": [["privacy.trackingprotection.enabled", true],
+           ["channelclassifier.allowlist_example", true],
+           ["browser.safebrowsing.provider.mozilla.nextupdatetime", "1"],
+           ["browser.safebrowsing.provider.mozilla.lists", testTable],
+           ["browser.safebrowsing.provider.mozilla.updateURL", UPDATE_URL]]},
+  runTest);
+
+// Expected finish() call is in runTest() above, after the window check.
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref_bug1395411.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref_bug1395411.html
new file mode 100644
index 0000000000..df3ac79d8a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classifier_changetablepref_bug1395411.html
@@ -0,0 +1,72 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Bug 1395411 - Changing the urlclassifier.*Table prefs doesn't remove them from the update checker.</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+
+// One V2 and one V4 test table, each registered with its own provider and
+// update URL below.
+const testTableV2 = "mochi1-phish-simple";
+const testTableV4 = "mochi1-phish-proto";
+const UPDATE_URL_V2 = "http://mochi.test:8888/tests/toolkit/components/url-classifier/dummyV2";
+const UPDATE_URL_V4 = "http://mochi.test:8888/tests/toolkit/components/url-classifier/dummyV4";
+
+let listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
+                  getService(Ci.nsIUrlListManager);
+
+// Shorthand: push a set of pref pairs in one call.
+let pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+
+SpecialPowers.pushPrefEnv(
+  {"set": [["browser.safebrowsing.phishing.enabled", true],
+           ["browser.safebrowsing.provider.mozilla.lists", testTableV2],
+           ["browser.safebrowsing.provider.mozilla4.lists", testTableV4],
+           ["browser.safebrowsing.provider.mozilla4.updateURL", UPDATE_URL_V4],
+           ["browser.safebrowsing.provider.mozilla.updateURL", UPDATE_URL_V2]]},
+  runTest);
+
+// Flip urlclassifier.phishTable between the V2/V4 test tables and verify
+// the list manager registers (non-empty) or unregisters (empty) the
+// matching update URL each time.
+function runTest() {
+  (async function() {
+    await classifierHelper.waitForInit();
+
+    await pushPrefs(["urlclassifier.phishTable", testTableV2 + "," + testTableV4]);
+    is(listmanager.getUpdateUrl(testTableV4), UPDATE_URL_V4, "Correct update url v4");
+    is(listmanager.getUpdateUrl(testTableV2), UPDATE_URL_V2, "Correct update url v2");
+
+    await pushPrefs(["urlclassifier.phishTable", testTableV2]);
+    is(listmanager.getUpdateUrl(testTableV4), "", "Correct empty update url v4");
+    is(listmanager.getUpdateUrl(testTableV2), UPDATE_URL_V2, "Correct update url v2");
+
+    await pushPrefs(["urlclassifier.phishTable", testTableV4]);
+    is(listmanager.getUpdateUrl(testTableV4), UPDATE_URL_V4, "Correct update url v4");
+    is(listmanager.getUpdateUrl(testTableV2), "", "Correct empty update url v2");
+
+    await pushPrefs(["urlclassifier.phishTable", ""]);
+    is(listmanager.getUpdateUrl(testTableV4), "", "Correct empty update url v4");
+    is(listmanager.getUpdateUrl(testTableV2), "", "Correct empty update url v2");
+
+    await classifierHelper._cleanup();
+
+    SimpleTest.finish();
+  })();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_match.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_match.html
new file mode 100644
index 0000000000..f6ea460be1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classifier_match.html
@@ -0,0 +1,179 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the URI Classifier Matched Info (bug 1288633) </title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="application/javascript">
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+var Cr = SpecialPowers.Cr;
+
+var inputDatas = [
+ { url: "malware.mochi.test/",
+ db: "mochi-block-simple",
+ },
+ { url: "malware1.mochi.test/",
+ db: "mochi1-block-simple",
+ },
+ { url: "malware1.mochi.test/",
+ db: "mochi1-malware-simple",
+ provider: "mozilla",
+ },
+ { url: "malware2.mochi.test/",
+ db: "mochi2-unwanted-simple",
+ provider: "mozilla",
+ },
+ { url: "malware2.mochi.test/",
+ db: "mochi2-malware-simple",
+ provider: "mozilla",
+ },
+ { url: "malware3.mochi.test/",
+ db: "mochig3-malware-simple",
+ provider: "google",
+ },
+ { url: "malware3.mochi.test/",
+ db: "mochim3-malware-simple",
+ provider: "mozilla",
+ },
+];
+
+function hash(str) {
+ let hasher = Cc["@mozilla.org/security/hash;1"]
+ .createInstance(Ci.nsICryptoHash);
+
+ let data = new TextEncoder().encode(str);
+ hasher.init(hasher.SHA256);
+ hasher.update(data, data.length);
+
+ return hasher.finish(false);
+}
+
+var testDatas = [
+ // Match empty provider
+ { url: "http://malware.mochi.test",
+ expect: { error: Cr.NS_ERROR_BLOCKED_URI,
+ table: "mochi-block-simple",
+ provider: "",
+ fullhash: (function() {
+ return hash("malware.mochi.test/");
+ })(),
+ },
+ },
+ // Match multiple tables, only one has valid provider
+ { url: "http://malware1.mochi.test",
+ expect: { error: Cr.NS_ERROR_MALWARE_URI,
+ table: "mochi1-malware-simple",
+ provider: "mozilla",
+ fullhash: (function() {
+ return hash("malware1.mochi.test/");
+ })(),
+ },
+ },
+ // Match multiple tables, handle order
+ { url: "http://malware2.mochi.test",
+ expect: { error: Cr.NS_ERROR_MALWARE_URI,
+ table: "mochi2-malware-simple",
+ provider: "mozilla",
+ fullhash: (function() {
+ return hash("malware2.mochi.test/");
+ })(),
+ },
+ },
+ // Match multiple tables, handle order
+ { url: "http://malware3.mochi.test",
+ expect: { error: Cr.NS_ERROR_MALWARE_URI,
+ table: "mochig3-malware-simple",
+ provider: "google",
+ fullhash: (function() {
+ return hash("malware3.mochi.test/");
+ })(),
+ },
+ },
+
+];
+
+SimpleTest.waitForExplicitFinish();
+
+function setupTestData(datas) {
+ let prefValues = {};
+ for (let data of datas) {
+ if (!data.provider) {
+ continue;
+ }
+ let providerPref = "browser.safebrowsing.provider." + data.provider + ".lists";
+ let prefValue;
+ if (!prefValues[providerPref]) {
+ prefValue = data.db;
+ } else {
+ prefValue = prefValues[providerPref] + "," + data.db;
+ }
+ prefValues[providerPref] = prefValue;
+ }
+
+ // Convert map to array
+ let prefArray = [];
+ for (var pref in prefValues) {
+ prefArray.push([pref, prefValues[pref]]);
+ }
+
+ let activeTablePref = "urlclassifier.malwareTable";
+ let activeTable = SpecialPowers.getCharPref(activeTablePref);
+ for (let data of datas) {
+ activeTable += "," + data.db;
+ }
+ prefArray.push([activeTablePref, activeTable]);
+
+ return SpecialPowers.pushPrefEnv({set: prefArray});
+}
+
+function runTest() {
+ return new Promise(resolve => {
+ function runNextTest() {
+ if (!testDatas.length) {
+ resolve();
+ return;
+ }
+ let test = testDatas.shift();
+ let uri = SpecialPowers.Services.io.newURI(test.url);
+ let prin = SpecialPowers.Services.scriptSecurityManager.createContentPrincipal(uri, {});
+ SpecialPowers.doUrlClassify(prin, function(errorCode, table, provider, fullhash) {
+ is(errorCode, test.expect.error, `Test url ${test.url} correct error`);
+ is(table, test.expect.table, `Test url ${test.url} correct table`);
+ is(provider, test.expect.provider, `Test url ${test.url} correct provider`);
+ is(fullhash, btoa(test.expect.fullhash), `Test url ${test.url} correct full hash`);
+ runNextTest();
+ });
+ }
+ runNextTest();
+ });
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["browser.safebrowsing.malware.enabled", true]]},
+ function() {
+ classifierHelper.waitForInit()
+ .then(() => setupTestData(inputDatas))
+ .then(() => classifierHelper.addUrlToDB(inputDatas))
+ .then(runTest)
+ .then(function() {
+ SimpleTest.finish();
+ }).catch(function(e) {
+ ok(false, "Some tests failed with error " + e);
+ SimpleTest.finish();
+ });
+ });
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html b/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html
new file mode 100644
index 0000000000..0e984c2a64
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classifier_worker.html
@@ -0,0 +1,73 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the URI Classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+// Add some URLs to the malware database.
+var testData = [
+ { url: "example.com/tests/toolkit/components/url-classifier/tests/mochitest/evilWorker.js",
+ db: "test-malware-simple",
+ },
+ { url: "example.com/tests/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js",
+ db: "test-unwanted-simple",
+ },
+];
+
+function loadTestFrame() {
+ document.getElementById("testFrame").src =
+ "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/workerFrame.html";
+}
+
+function onmessage(event) {
+ var pieces = event.data.split(":");
+ if (pieces[0] == "finish") {
+ SimpleTest.finish();
+ return;
+ }
+
+ is(pieces[0], "success", pieces[1]);
+}
+
+function updateSuccess() {
+ SpecialPowers.pushPrefEnv(
+ {"set": [["browser.safebrowsing.malware.enabled", true]]},
+ loadTestFrame);
+}
+
+function updateError(errorCode) {
+ ok(false, "Couldn't update classifier. Error code: " + errorCode);
+ // Abort test.
+ SimpleTest.finish();
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple"]]},
+ function() {
+ classifierHelper.waitForInit()
+ .then(() => classifierHelper.addUrlToDB(testData))
+ .then(updateSuccess)
+ .catch(err => {
+ updateError(err);
+ });
+ });
+
+window.addEventListener("message", onmessage);
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_by_default.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_by_default.html
new file mode 100644
index 0000000000..c10a0c62f9
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classify_by_default.html
@@ -0,0 +1,156 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Test for Bug 1442496</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
+</head>
+
+<body>
+<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1442496">Mozilla Bug 1442496</a>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script type="text/javascript" src="classifierHelper.js"></script>
+<script class="testbody" type="text/javascript">
+
+// To add a request to this test, add the request in trackerFrame.html
+// and add its query-string id ("?id=xxx") to the list below.
+const trackersAll = [
+ "img-src",
+ "object-data",
+ "script-src",
+ "iframe-src",
+ "link-rel-stylesheet",
+ "video-src",
+ "track-src",
+ "ping",
+ "fetch",
+ "xmlhttprequest",
+ "send-beacon",
+ "fetch-in-sw",
+];
+
+const TRACKER_DOMAIN = "itisatracker.org";
+const TEST_TOP_DOMAIN = "example.com";
+
+const TEST_TOP_PAGE = "trackerFrame.html";
+const TRACKER_SERVER = "trackerFrame.sjs";
+
+const TEST_PATH = "/tests/toolkit/components/url-classifier/tests/mochitest/";
+
+const TEST_TOP_SITE = "https:///" + TEST_TOP_DOMAIN + TEST_PATH;
+const TRACKER_SITE = "https://" + TRACKER_DOMAIN + TEST_PATH;
+const TRACKER_SJS = "https://" + TRACKER_DOMAIN + TEST_PATH + TRACKER_SERVER;
+
+// This function asks the server to set the cookie
+async function setupAndRun(hasCookie, topLevelSite = TEST_TOP_SITE) {
+ // In order to apply the correct cookieBehavior, we need to first open a new
+ // window to setup cookies. So, the new window will use the correct
+ // cookieBehavior. Otherwise, it will use the default cookieBehavior.
+ let setup_win = window.open();
+ await setup_win.fetch(TRACKER_SJS + "?init=" + trackersAll.length, {
+ credentials: "include",
+ });
+ setup_win.close();
+
+ return new Promise(resolve => {
+ let win;
+ let query = hasCookie ? "with-cookie" : "without-cookie";
+ fetch(TRACKER_SJS + "?callback=" + query).then(r => {
+ r.text().then((body) => {
+ let trackers_found = body.split(",");
+ for (let tracker of trackersAll) {
+ let description = "Tracker request " + tracker + "received " +
+ (hasCookie ? "with" : "without") + " cookie";
+ ok(trackers_found.includes(tracker), description);
+ }
+ win.close();
+ resolve();
+ });
+ });
+
+ win = window.open(topLevelSite + TEST_TOP_PAGE);
+ });
+}
+
+async function cleanup(topLevelSite = TEST_TOP_SITE) {
+ function clearServiceWorker() {
+ return new Promise(resolve => {
+ let w;
+ window.onmessage = function(e) {
+ if (e.data.status == "unregistrationdone") {
+ w.close();
+ resolve();
+ }
+ }
+ w = window.open(TEST_TOP_SITE + "sw_unregister.html");
+ });
+ }
+
+ // Ensure we clear the stylesheet cache so that we re-classify.
+ SpecialPowers.DOMWindowUtils.clearSharedStyleSheetCache();
+
+ await clearServiceWorker();
+}
+
+async function runTests() {
+ await SpecialPowers.pushPrefEnv({set: [
+ ["urlclassifier.trackingAnnotationTable.testEntries", TRACKER_DOMAIN],
+ ["urlclassifier.trackingAnnotationWhitelistTable", "test-trackwhite-simple"],
+ ["network.cookie.cookieBehavior", 4],
+ ["privacy.trackingprotection.enabled", false ],
+ ["privacy.trackingprotection.annotate_channels", false],
+ ["browser.send_pings", true],
+ ["dom.serviceWorkers.exemptFromPerDomainMax", true],
+ ["dom.serviceWorkers.enabled", true],
+ ["dom.serviceWorkers.testing.enabled", true],
+ ]});
+
+ await classifierHelper.waitForInit();
+
+ let hasCookie;
+
+ info("Test all requests should be sent with cookies when ETP is off");
+ hasCookie = true;
+ await setupAndRun(hasCookie);
+ await cleanup();
+
+ info("Test all requests should be sent without cookies when ETP is on");
+ await SpecialPowers.pushPrefEnv({set: [
+ [ "privacy.trackingprotection.annotate_channels", true],
+ ]});
+
+ hasCookie = false;
+ await setupAndRun(hasCookie);
+ await cleanup();
+
+ info("Test all requests should be sent with cookies when top-level is in the whitelist");
+ await classifierHelper.addUrlToDB([{
+ url: TEST_TOP_DOMAIN + "/?resource=" + TRACKER_DOMAIN,
+ db: "test-trackwhite-simple",
+ }]);
+
+ hasCookie = true;
+ await setupAndRun(hasCookie);
+ await cleanup();
+
+ info("Test all requests should be sent with cookies when the tracker is a first-party");
+ hasCookie = true;
+ await setupAndRun(hasCookie, TRACKER_SITE);
+ await cleanup(TRACKER_SITE);
+
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+</script>
+
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html
new file mode 100644
index 0000000000..6a8a189ed2
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classify_ping.html
@@ -0,0 +1,131 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1233914 - ping doesn't honor the TP list here.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+ SimpleTest.requestFlakyTimeout("Delay to make sure ping is made prior than XHR");
+
+ const timeout = 200;
+ const host_nottrack = "http://not-tracking.example.com/";
+ const host_track = "http://trackertest.org/";
+ const path_ping = "tests/toolkit/components/url-classifier/tests/mochitest/ping.sjs";
+ const TP_ENABLE_PREF = "privacy.trackingprotection.enabled";
+ const RETRY_TIMEOUT_MS = 200;
+
+ async function testPingNonBlocklist() {
+ await SpecialPowers.setBoolPref(TP_ENABLE_PREF, true);
+
+ var msg = "ping should reach page not in blacklist";
+ var expectPing = true;
+ var id = "1111";
+ ping(id, host_nottrack);
+
+ return new Promise(function(resolve, reject) {
+      // Retry for at most 30 seconds.
+ isPingedWithRetry(id, expectPing, msg, resolve, 30 * 1000 / RETRY_TIMEOUT_MS);
+ });
+ }
+
+ async function testPingBlocklistSafebrowsingOff() {
+ await SpecialPowers.setBoolPref(TP_ENABLE_PREF, false);
+
+ var msg = "ping should reach page in blacklist when tracking protection is off";
+ var expectPing = true;
+ var id = "2222";
+ ping(id, host_track);
+
+ return new Promise(function(resolve, reject) {
+      // Retry for at most 30 seconds.
+ isPingedWithRetry(id, expectPing, msg, resolve, 30 * 1000 / RETRY_TIMEOUT_MS);
+ });
+ }
+
+ async function testPingBlocklistSafebrowsingOn() {
+ await SpecialPowers.setBoolPref(TP_ENABLE_PREF, true);
+
+ var msg = "ping should not reach page in blacklist when tracking protection is on";
+ var expectPing = false;
+ var id = "3333";
+ ping(id, host_track);
+
+ return new Promise(function(resolve, reject) {
+ setTimeout(function() {
+ isPinged(id, expectPing, msg, resolve);
+ }, timeout);
+ });
+ }
+
+ function ping(id, host) {
+ var elm = document.createElement("a");
+ elm.setAttribute("ping", host + path_ping + "?id=" + id);
+ elm.setAttribute("href", "#");
+ document.body.appendChild(elm);
+
+ // Trigger ping.
+ elm.click();
+
+ document.body.removeChild(elm);
+ }
+
+ function isPingedWithRetry(id, expected, msg, callback, retryCnt) {
+ var url = "http://mochi.test:8888/" + path_ping;
+ var xhr = new XMLHttpRequest();
+ xhr.open("GET", url + "?id=" + id);
+ xhr.onload = function() {
+ let pinged = xhr.response === "ping";
+ let success = pinged === expected;
+ if (success || 0 === retryCnt) {
+ is(expected, pinged, msg);
+ callback();
+ return;
+ }
+ // Retry on failure.
+ setTimeout(() => {
+ isPingedWithRetry(id, expected, msg, callback, retryCnt - 1);
+ }, RETRY_TIMEOUT_MS);
+ };
+ xhr.send();
+ }
+
+ function isPinged(id, expected, msg, callback) {
+ isPingedWithRetry(id, expected, msg, callback, 0);
+ }
+
+ function cleanup() {
+ SpecialPowers.clearUserPref(TP_ENABLE_PREF);
+ }
+
+ function runTest() {
+ classifierHelper.waitForInit()
+ .then(testPingNonBlocklist)
+ .then(testPingBlocklistSafebrowsingOff)
+ .then(testPingBlocklistSafebrowsingOn)
+ .then(function() {
+ SimpleTest.finish();
+ }).catch(function(e) {
+ ok(false, "Some test failed with error " + e);
+ SimpleTest.finish();
+ });
+ }
+
+ SimpleTest.waitForExplicitFinish();
+ SimpleTest.registerCleanupFunction(cleanup);
+ SpecialPowers.pushPrefEnv({"set": [
+ ["browser.send_pings", true],
+ ]}, runTest);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_top_sandboxed.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_top_sandboxed.html
new file mode 100644
index 0000000000..7e3ae97751
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classify_top_sandboxed.html
@@ -0,0 +1,74 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <meta charset="utf-8">
+ <title>Bug 1647681</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" href="/tests/SimpleTest/test.css"/>
+ <script>
+
+async function runTests() {
+ await SpecialPowers.pushPrefEnv({set: [
+ ["privacy.trackingprotection.annotate_channels", true],
+ ]});
+
+ var chromeScript;
+ chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ function onExamResp(subject, topic, data) {
+ let channel = subject.QueryInterface(Ci.nsIHttpChannel);
+ let classifiedChannel = subject.QueryInterface(Ci.nsIClassifiedChannel);
+ if (
+ !channel ||
+ !classifiedChannel ||
+ !channel.URI.spec.startsWith(
+ "https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js"
+ )
+ ) {
+ return;
+ }
+
+ sendAsyncMessage("last-channel-flags", {
+ classified: Ci.nsIClassifiedChannel.CLASSIFIED_TRACKING == classifiedChannel.classificationFlags,
+ });
+ }
+
+ addMessageListener("done", __ => {
+ Services.obs.removeObserver(onExamResp, "http-on-examine-response");
+ });
+
+ Services.obs.addObserver(onExamResp, "http-on-examine-response");
+
+ sendAsyncMessage("start-test");
+ });
+
+ chromeScript.addMessageListener(
+ "start-test",
+ async _ => {
+ window.open("https://example.org/tests/toolkit/components/url-classifier/tests/mochitest/sandboxed.html")
+ },
+ { once: true }
+ );
+
+ chromeScript.addMessageListener(
+ "last-channel-flags",
+ data => {
+ ok(data.classified, "tracker script should be classified when the top-level is sandboxed");
+ chromeScript.sendAsyncMessage("done");
+ SimpleTest.finish();
+ },
+ { once: true }
+ );
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+ </script>
+</head>
+<body>
+<p id="display"></p>
+<div id="content" style="display: none"></div>
+<pre id="test"></pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html b/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html
new file mode 100644
index 0000000000..cbb6e67c78
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_classify_track.html
@@ -0,0 +1,163 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1262406 - Track element doesn't use the URL classifier.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+ const PREF = "browser.safebrowsing.malware.enabled";
+ const track_path = "tests/toolkit/components/url-classifier/tests/mochitest/basic.vtt";
+ const malware_url = "http://malware.example.com/" + track_path;
+ const validtrack_url = "http://mochi.test:8888/" + track_path;
+
+ var video = document.createElement("video");
+ video.src = "seek.webm";
+ video.crossOrigin = "anonymous";
+
+ document.body.appendChild(video);
+
+ function testValidTrack() {
+ SpecialPowers.setBoolPref(PREF, true);
+
+ return new Promise(function(resolve, reject) {
+ var track = document.createElement("track");
+ track.track.mode = "hidden";
+ track.src = validtrack_url;
+ video.appendChild(track);
+
+ function onload() {
+ ok(true, "Track should be loaded when url is not in blocklist");
+ finish();
+ }
+
+ function onerror() {
+ ok(false, "Error while loading track");
+ finish();
+ }
+
+ function finish() {
+ track.removeEventListener("load", onload);
+ track.removeEventListener("error", onerror);
+ resolve();
+ }
+
+ track.addEventListener("load", onload);
+ track.addEventListener("error", onerror);
+ });
+ }
+
+ function testBlocklistTrackSafebrowsingOff() {
+ SpecialPowers.setBoolPref(PREF, false);
+
+ return new Promise(function(resolve, reject) {
+ var track = document.createElement("track");
+ track.track.mode = "hidden";
+ track.src = malware_url;
+ video.appendChild(track);
+
+ function onload() {
+ ok(true, "Track should be loaded when url is in blocklist and safebrowsing is off");
+ finish();
+ }
+
+ function onerror() {
+ ok(false, "Error while loading track");
+ finish();
+ }
+
+ function finish() {
+ track.removeEventListener("load", onload);
+ track.removeEventListener("error", onerror);
+ resolve();
+ }
+
+ track.addEventListener("load", onload);
+ track.addEventListener("error", onerror);
+ });
+ }
+
+ function testBlocklistTrackSafebrowsingOn() {
+ SpecialPowers.setBoolPref(PREF, true);
+
+ return new Promise(function(resolve, reject) {
+ var track = document.createElement("track");
+ track.track.mode = "hidden";
+      // Add a query string parameter here so the URL classifier does not skip
+      // classification because of a cached result.
+ track.src = malware_url + "?testsbon";
+ video.appendChild(track);
+
+ function onload() {
+ ok(false, "Unexpected result while loading track in blocklist");
+ finish();
+ }
+
+ function onerror() {
+ ok(true, "Track should not be loaded when url is in blocklist and safebrowsing is on");
+ finish();
+ }
+
+ function finish() {
+ track.removeEventListener("load", onload);
+ track.removeEventListener("error", onerror);
+ resolve();
+ }
+
+ track.addEventListener("load", onload);
+ track.addEventListener("error", onerror);
+ });
+ }
+
+ function cleanup() {
+ SpecialPowers.clearUserPref(PREF);
+ }
+
+ function setup() {
+ var testData = [
+ { url: "malware.example.com/",
+ db: "test-malware-simple",
+ },
+ ];
+
+ return classifierHelper.addUrlToDB(testData)
+ .catch(function(err) {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+ }
+
+ function runTest() {
+ Promise.resolve()
+ .then(classifierHelper.waitForInit)
+ .then(setup)
+ .then(testValidTrack)
+ .then(testBlocklistTrackSafebrowsingOff)
+ .then(testBlocklistTrackSafebrowsingOn)
+ .then(function() {
+ SimpleTest.finish();
+ }).catch(function(e) {
+ ok(false, "Some test failed with error " + e);
+ SimpleTest.finish();
+ });
+ }
+
+ SimpleTest.waitForExplicitFinish();
+ SimpleTest.registerCleanupFunction(cleanup);
+ SpecialPowers.pushPrefEnv({"set": [
+ ["urlclassifier.malwareTable", "test-malware-simple"],
+ ]}, runTest);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_cryptomining.html b/toolkit/components/url-classifier/tests/mochitest/test_cryptomining.html
new file mode 100644
index 0000000000..0098d6c489
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_cryptomining.html
@@ -0,0 +1,100 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the cryptomining classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+var tests = [
+ // All disabled.
+ { config: [ false, false ], loadExpected: true },
+
+ // Just entitylisted.
+ { config: [ false, true ], loadExpected: true },
+
+ // Just blocklisted.
+ { config: [ true, false ], loadExpected: false },
+
+ // entitylist + blocklist: entitylist wins
+ { config: [ true, true ], loadExpected: true },
+];
+
+function prefValue(value, what) {
+ return value ? what : "";
+}
+
+async function runTest(test) {
+ await SpecialPowers.pushPrefEnv({set: [
+ [ "urlclassifier.features.cryptomining.blacklistHosts", prefValue(test.config[0], "example.com") ],
+ [ "urlclassifier.features.cryptomining.whitelistHosts", prefValue(test.config[1], "mochi.test,mochi.xorigin-test") ],
+ [ "urlclassifier.features.cryptomining.blacklistTables", prefValue(test.config[0], "mochitest1-track-simple") ],
+ [ "urlclassifier.features.cryptomining.whitelistTables", "" ],
+ [ "privacy.trackingprotection.enabled", false ],
+ [ "privacy.trackingprotection.annotate_channels", false ],
+ [ "privacy.trackingprotection.cryptomining.enabled", true ],
+ [ "privacy.trackingprotection.emailtracking.enabled", false ],
+ [ "privacy.trackingprotection.fingerprinting.enabled", false ],
+ [ "privacy.trackingprotection.socialtracking.enabled", false ],
+ ]});
+
+ info("Testing: " + JSON.stringify(test.config) + "\n");
+
+ // Let's load an image with a random query string, just to avoid network cache.
+ let result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.loadExpected, "The loading happened correctly");
+
+ // Let's load an image with a random query string, just to avoid network cache.
+ result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.loadExpected, "The loading happened correctly (by table)");
+}
+
+async function runTests() {
+ let chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+ );
+
+ addMessageListener("loadTrackers", __ => {
+ return UrlClassifierTestUtils.addTestTrackers();
+ });
+
+ addMessageListener("unloadTrackers", __ => {
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ });
+ });
+
+ await chromeScript.sendQuery("loadTrackers");
+
+ for (let test in tests) {
+ await runTest(tests[test]);
+ }
+
+ await chromeScript.sendQuery("unloadTrackers");
+
+ chromeScript.destroy();
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_cryptomining_annotate.html b/toolkit/components/url-classifier/tests/mochitest/test_cryptomining_annotate.html
new file mode 100644
index 0000000000..ecf5cd02a4
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_cryptomining_annotate.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the relationship between annotation vs blocking - cryptomining</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="features.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+runTests(SpecialPowers.Ci.nsIClassifiedChannel.CLASSIFIED_CRYPTOMINING,
+ [
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.annotate_channels", false],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.fingerprinting.enabled", false],
+ ["privacy.trackingprotection.cryptomining.enabled", true],
+ ["urlclassifier.features.socialtracking.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.socialtracking.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.socialtracking.enabled", false],
+ ["privacy.trackingprotection.emailtracking.enabled", false],
+ ],
+ false /* a tracking resource */);
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html b/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html
new file mode 100644
index 0000000000..96278ae49d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_donottrack.html
@@ -0,0 +1,141 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1258033 - Fix the DNT loophole for tracking protection</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+
+const tests = [
+ // DNT turned on and TP turned off, DNT signal sent in both private browsing
+ // and normal mode.
+ {
+ setting: {dntPref: true, tpPref: false, tppbPref: false, pbMode: true},
+ expected: {dnt: "1"},
+ },
+ {
+ setting: {dntPref: true, tpPref: false, tppbPref: false, pbMode: false},
+ expected: {dnt: "1"},
+ },
+ // DNT turned off and TP turned on globally, DNT signal sent in both private
+ // browsing and normal mode.
+ {
+ setting: {dntPref: false, tpPref: true, tppbPref: false, pbMode: true},
+ expected: {dnt: "1"},
+ },
+ {
+ setting: {dntPref: false, tpPref: true, tppbPref: false, pbMode: false},
+ expected: {dnt: "1"},
+ },
+ // DNT turned off and TP in Private Browsing only, DNT signal sent in private
+ // browsing mode only.
+ {
+ setting: {dntPref: false, tpPref: false, tppbPref: true, pbMode: true},
+ expected: {dnt: "1"},
+ },
+ {
+ setting: {dntPref: false, tpPref: false, tppbPref: true, pbMode: false},
+ expected: {dnt: "unspecified"},
+ },
+ // DNT turned off and TP turned off, DNT signal is never sent.
+ {
+ setting: {dntPref: false, tpPref: false, tppbPref: false, pbMode: true},
+ expected: {dnt: "unspecified"},
+ },
+ {
+ setting: {dntPref: false, tpPref: false, tppbPref: false, pbMode: false},
+ expected: {dnt: "unspecified"},
+ },
+];
+
+const DNT_PREF = "privacy.donottrackheader.enabled";
+const TP_PREF = "privacy.trackingprotection.enabled";
+const TP_PB_PREF = "privacy.trackingprotection.pbmode.enabled";
+
+const contentPage =
+ "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/dnt.html";
+
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+function executeTest(test) {
+ SpecialPowers.pushPrefEnv({"set": [
+ [DNT_PREF, test.setting.dntPref],
+ [TP_PREF, test.setting.tpPref],
+ [TP_PB_PREF, test.setting.tppbPref],
+ ]});
+
+ var win = mainWindow.OpenBrowserWindow({private: test.setting.pbMode});
+
+ return new Promise(function(resolve, reject) {
+ win.addEventListener("load", function() {
+ TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win).then(() => {
+ win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+ if (win.content.location.href != contentPage) {
+ win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+ triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+ });
+ return;
+ }
+
+ win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+ win.content.addEventListener("message", function(event) {
+ let [key, value] = event.data.split("=");
+ if (key == "finish") {
+ win.close();
+ resolve();
+ } else if (key == "navigator.doNotTrack") {
+ is(value, test.expected.dnt, "navigator.doNotTrack should be " + test.expected.dnt);
+ } else if (key == "DNT") {
+ let msg = test.expected.dnt == "1" ? "" : "not ";
+ is(value, test.expected.dnt, "DNT header should " + msg + "be sent");
+ } else {
+ ok(false, "unexpected message");
+ }
+ });
+ }, true);
+ SimpleTest.executeSoon(function() {
+ win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+ triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+ });
+ });
+ });
+ }, {capture: true, once: true});
+ });
+}
+
+let loop = function loop(index) {
+ if (index >= tests.length) {
+ SimpleTest.finish();
+ return;
+ }
+
+ let test = tests[index];
+ let next = function next() {
+ loop(index + 1);
+ };
+ let result = executeTest(test);
+ result.then(next, next);
+};
+
+SimpleTest.waitForExplicitFinish();
+loop(0);
+
+</script>
+
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_emailtracking.html b/toolkit/components/url-classifier/tests/mochitest/test_emailtracking.html
new file mode 100644
index 0000000000..fd2996997c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_emailtracking.html
@@ -0,0 +1,130 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the email tracking classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+var tests = [
+ // All disabled.
+ { config: [ false, false ], loadExpected: true },
+
+ // Just entitylisted.
+ { config: [ false, true ], loadExpected: true },
+
+ // Just blocklisted.
+ { config: [ true, false ], loadExpected: false },
+
+ // entitylist + blocklist: entitylist wins
+ { config: [ true, true ], loadExpected: true },
+];
+
+function prefValue(value, what) {
+ return value ? what : "";
+}
+
+// Exercise the email-tracking classifier for one |test| configuration:
+// config[0] toggles the blocklist (host pref + test table), config[1] the
+// allowlist. Each resource kind is loaded twice — once from example.com
+// (classified via the *Hosts pref) and once from email-tracking.example.org
+// (classified via the mochitest5-track-simple table).
+async function runTest(test) {
+  await SpecialPowers.pushPrefEnv({set: [
+    [ "urlclassifier.features.emailtracking.blocklistHosts", prefValue(test.config[0], "example.com") ],
+    [ "urlclassifier.features.emailtracking.allowlistHosts", prefValue(test.config[1], "mochi.test,mochi.xorigin-test") ],
+    [ "urlclassifier.features.emailtracking.blocklistTables", prefValue(test.config[0], "mochitest5-track-simple") ],
+    [ "urlclassifier.features.emailtracking.allowlistTables", "" ],
+    // Disable every other protection feature so only the email-tracking
+    // classifier can affect these loads.
+    [ "privacy.trackingprotection.enabled", false ],
+    [ "privacy.trackingprotection.annotate_channels", false ],
+    [ "privacy.trackingprotection.cryptomining.enabled", false ],
+    [ "privacy.trackingprotection.emailtracking.enabled", true ],
+    [ "privacy.trackingprotection.fingerprinting.enabled", false ],
+    [ "privacy.trackingprotection.socialtracking.enabled", false ],
+  ]});
+
+  info("Testing: " + JSON.stringify(test.config) + "\n");
+
+  // Let's load an image with a random query string to avoid network cache.
+  let result = await new Promise(resolve => {
+    let image = new Image();
+    image.src = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+    image.onload = _ => resolve(true);
+    image.onerror = _ => resolve(false);
+  });
+
+  is(result, test.loadExpected, "Image loading happened correctly");
+
+  // Let's load an image with a random query string to avoid network cache.
+  result = await new Promise(resolve => {
+    let image = new Image();
+    image.src = "http://email-tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+    image.onload = _ => resolve(true);
+    image.onerror = _ => resolve(false);
+  });
+
+  is(result, test.loadExpected, "Image loading happened correctly (by table)");
+
+  // Let's load a script with a random query string to avoid network cache.
+  result = await new Promise(resolve => {
+    let script = document.createElement("script");
+    script.setAttribute(
+      "src",
+      "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js?" +
+        Math.random()
+    );
+    script.onload = _ => resolve(true);
+    script.onerror = _ => resolve(false);
+    document.body.appendChild(script);
+  });
+
+  is(result, test.loadExpected, "Script loading happened correctly");
+
+  // Let's load a script with a random query string to avoid network cache.
+  result = await new Promise(resolve => {
+    let script = document.createElement("script");
+    script.setAttribute(
+      "src",
+      "http://email-tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js?" +
+        Math.random()
+    );
+    script.onload = _ => resolve(true);
+    script.onerror = _ => resolve(false);
+    document.body.appendChild(script);
+  });
+
+  is(result, test.loadExpected, "Script loading happened correctly (by table)");
+}
+
+async function runTests() {
+ let chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+ );
+
+ addMessageListener("loadTrackers", __ => {
+ return UrlClassifierTestUtils.addTestTrackers();
+ });
+
+ addMessageListener("unloadTrackers", __ => {
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ });
+ });
+
+ await chromeScript.sendQuery("loadTrackers");
+
+ for (let test in tests) {
+ await runTest(tests[test]);
+ }
+
+ await chromeScript.sendQuery("unloadTrackers");
+
+ chromeScript.destroy();
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting.html b/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting.html
new file mode 100644
index 0000000000..81e6f9a466
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting.html
@@ -0,0 +1,130 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the fingerprinting classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+var tests = [
+ // All disabled.
+ { config: [ false, false ], imgLoadExpected: true, scriptLoadExpected: true },
+
+ // Just entitylisted.
+ { config: [ false, true ], imgLoadExpected: true, scriptLoadExpected: true },
+
+ // Just blocklisted.
+ { config: [ true, false ], imgLoadExpected: true, scriptLoadExpected: false },
+
+ // entitylist + blocklist: entitylist wins
+ { config: [ true, true ], imgLoadExpected: true, scriptLoadExpected: true },
+];
+
+function prefValue(value, what) {
+ return value ? what : "";
+}
+
+async function runTest(test) {
+ await SpecialPowers.pushPrefEnv({set: [
+ [ "urlclassifier.features.fingerprinting.blacklistHosts", prefValue(test.config[0], "example.com") ],
+ [ "urlclassifier.features.fingerprinting.whitelistHosts", prefValue(test.config[1], "mochi.test,mochi.xorigin-test") ],
+ [ "urlclassifier.features.fingerprinting.blacklistTables", prefValue(test.config[0], "mochitest1-track-simple") ],
+ [ "urlclassifier.features.fingerprinting.whitelistTables", "" ],
+ [ "privacy.trackingprotection.enabled", false ],
+ [ "privacy.trackingprotection.annotate_channels", false ],
+ [ "privacy.trackingprotection.cryptomining.enabled", false ],
+ [ "privacy.trackingprotection.emailtracking.enabled", false ],
+ [ "privacy.trackingprotection.fingerprinting.enabled", true ],
+ [ "privacy.trackingprotection.socialtracking.enabled", false ],
+ ]});
+
+ info("Testing: " + JSON.stringify(test.config) + "\n");
+
+ // Let's load an image with a random query string to avoid network cache.
+ let result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.imgLoadExpected, "Image loading happened correctly");
+
+ // Let's load an image with a random query string to avoid network cache.
+ result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.imgLoadExpected, "Image loading happened correctly (by table)");
+
+ // Let's load a script with a random query string to avoid network cache.
+ result = await new Promise(resolve => {
+ let script = document.createElement("script");
+ script.setAttribute(
+ "src",
+ "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js?" +
+ Math.random()
+ );
+ script.onload = _ => resolve(true);
+ script.onerror = _ => resolve(false);
+ document.body.appendChild(script);
+ });
+
+ is(result, test.scriptLoadExpected, "Script loading happened correctly");
+
+ // Let's load a script with a random query string to avoid network cache.
+ result = await new Promise(resolve => {
+ let script = document.createElement("script");
+ script.setAttribute(
+ "src",
+ "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/evil.js?" +
+ Math.random()
+ );
+ script.onload = _ => resolve(true);
+ script.onerror = _ => resolve(false);
+ document.body.appendChild(script);
+ });
+
+ is(result, test.scriptLoadExpected, "Script loading happened correctly");
+}
+
+async function runTests() {
+ let chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+ );
+
+ addMessageListener("loadTrackers", __ => {
+ return UrlClassifierTestUtils.addTestTrackers();
+ });
+
+ addMessageListener("unloadTrackers", __ => {
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ });
+ });
+
+ await chromeScript.sendQuery("loadTrackers");
+
+ for (let test in tests) {
+ await runTest(tests[test]);
+ }
+
+ await chromeScript.sendQuery("unloadTrackers");
+
+ chromeScript.destroy();
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting_annotate.html b/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting_annotate.html
new file mode 100644
index 0000000000..0bc01645b1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_fingerprinting_annotate.html
@@ -0,0 +1,31 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the relationship between annotation vs blocking - fingerprinting</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="features.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+runTests(SpecialPowers.Ci.nsIClassifiedChannel.CLASSIFIED_FINGERPRINTING,
+ [
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.annotate_channels", false],
+ ["privacy.trackingprotection.fingerprinting.enabled", true],
+ ["urlclassifier.features.cryptomining.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.cryptomining.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.cryptomining.enabled", false],
+ ["urlclassifier.features.socialtracking.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.socialtracking.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.socialtracking.enabled", false],
+ ["privacy.trackingprotection.emailtracking.enabled", false],
+ ],
+ true /* a tracking resource */);
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_gethash.html b/toolkit/components/url-classifier/tests/mochitest/test_gethash.html
new file mode 100644
index 0000000000..b9d3a2fd44
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_gethash.html
@@ -0,0 +1,118 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1272239 - Test gethash.</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+<iframe id="testFrame1" onload=""></iframe>
+<iframe id="testFrame2" onload=""></iframe>
+
+<script src="head.js"></script>
+<script class="testbody" type="text/javascript">
+const MALWARE_LIST = "test-malware-simple";
+const MALWARE_HOST = "malware.example.com/";
+
+const UNWANTED_LIST = "test-unwanted-simple";
+const UNWANTED_HOST = "unwanted.example.com/";
+
+const GETHASH_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/gethash.sjs";
+const NOTEXIST_URL = "http://mochi.test:8888/tests/toolkit/components/url-classifier/tests/mochitest/nonexistserver.sjs";
+
+var shouldLoad = false;
+
+// In this test case we store prefixes in the local DB and send the full hash
+// to the gethash server. When the test page is accessed, Gecko should send a
+// gethash request to the server and receive the completion response.
+function loadTestFrame(id) {
+ return new Promise(function(resolve, reject) {
+ var iframe = document.getElementById(id);
+ iframe.setAttribute("src", "gethashFrame.html");
+
+ iframe.onload = function() {
+ resolve();
+ };
+ });
+}
+
+// add 4-bytes prefixes to local database, so when we access the url,
+// it will trigger gethash request.
+function addPrefixToDB(list, url) {
+ var testData = [{ db: list, url, len: 4 }];
+
+ return classifierHelper.addUrlToDB(testData)
+ .catch(function(err) {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+}
+
+// Scenario 2: point completions at a non-existent server so gethash fails;
+// resources are then expected to load (shouldLoad flips to true).
+function setup404() {
+  shouldLoad = true;
+
+  return Promise.all([
+    classifierHelper.allowCompletion(
+      [MALWARE_LIST, UNWANTED_LIST], NOTEXIST_URL),
+    addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
+    addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST),
+  ]);
+}
+
+// Scenario 1: register prefixes locally and serve their full-hash
+// completions from gethash.sjs, so lookups complete and loads are blocked.
+function setup() {
+  return Promise.all([
+    classifierHelper.allowCompletion(
+      [MALWARE_LIST, UNWANTED_LIST], GETHASH_URL),
+    addPrefixToDB(MALWARE_LIST, MALWARE_HOST),
+    addPrefixToDB(UNWANTED_LIST, UNWANTED_HOST),
+    addCompletionToServer(MALWARE_LIST, MALWARE_HOST, GETHASH_URL),
+    addCompletionToServer(UNWANTED_LIST, UNWANTED_HOST, GETHASH_URL),
+  ]);
+}
+
+// manually reset DB to make sure next test won't be affected by cache.
+function reset() {
+  // Drop all stored prefixes so cached results from the first scenario
+  // cannot leak into the second.
+  return classifierHelper.resetDatabase();
+}
+
+// Drive both scenarios sequentially: testFrame1 loads while completions are
+// served (resources blocked), then testFrame2 loads while the gethash
+// endpoint does not exist (resources allowed).
+function runTest() {
+  Promise.resolve()
+    // These test resources get blocked when gethash returns successfully
+    .then(classifierHelper.waitForInit)
+    .then(setup)
+    .then(() => loadTestFrame("testFrame1"))
+    .then(reset)
+    // These test resources are not blocked when gethash returns an error
+    .then(setup404)
+    .then(() => loadTestFrame("testFrame2"))
+    .then(function() {
+      SimpleTest.finish();
+    }).catch(function(e) {
+      ok(false, "Some test failed with error " + e);
+      SimpleTest.finish();
+    });
+}
+
+SimpleTest.waitForExplicitFinish();
+
+// 'network.predictor.enabled' is disabled because if another test case loads
+// resources such as evil.js or evil.css, we might otherwise serve them from
+// the cache directly and bypass the classifier check.
+SpecialPowers.pushPrefEnv({"set": [
+ ["browser.safebrowsing.malware.enabled", true],
+ ["urlclassifier.malwareTable", "test-malware-simple,test-unwanted-simple"],
+ ["network.predictor.enabled", false],
+ ["urlclassifier.gethash.timeout_ms", 30000],
+]}, runTest);
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html b/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html
new file mode 100644
index 0000000000..0959ecf42e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_privatebrowsing_trackingprotection.html
@@ -0,0 +1,153 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test Tracking Protection in Private Browsing mode</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+var contentPage = "https://www.itisatrap.org/chrome/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html";
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+// Open a browser window (private iff |aPrivate|), navigate it to
+// |contentPage|, and resolve with the window once the content's "load"
+// event has fired, i.e. after subresources were fetched or blocked.
+function testOnWindow(aPrivate) {
+  return new Promise((resolve, reject) => {
+    let win = mainWindow.OpenBrowserWindow({private: aPrivate});
+    win.addEventListener("load", function() {
+      TestUtils.topicObserved("browser-delayed-startup-finished",
+        subject => subject == win).then(() => {
+        // The first DOMContentLoaded may be for the initial blank page;
+        // keep re-navigating until the loaded document is |contentPage|.
+        win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+          if (win.content.location.href != contentPage) {
+            win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+              triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+            });
+            return;
+          }
+          win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+          win.content.addEventListener("load", function innerLoad2() {
+            win.content.removeEventListener("load", innerLoad2);
+            SimpleTest.executeSoon(function() {
+              resolve(win);
+            });
+          }, false, true);
+        }, true);
+        SimpleTest.executeSoon(function() {
+          win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+            triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+          });
+        });
+      });
+    }, {capture: true, once: true});
+  });
+}
+
+var badids = [
+ "badscript",
+ "badimage",
+ "badcss",
+];
+
+// Verify the load/annotation state of the test frame in |aWindow|. When
+// |aBlocked| is true the "bad*" resources must have been blocked and every
+// element in |badids| must appear in document.blockedNodesByClassifier.
+function checkLoads(aWindow, aBlocked) {
+  var win = aWindow.content;
+  is(win.document.getElementById("badscript").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking javascript");
+  is(win.document.getElementById("badimage").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking images");
+  is(win.document.getElementById("goodscript").dataset.touched, "yes", "Should load entitylisted tracking javascript");
+  is(win.document.getElementById("goodimage").dataset.touched, "yes", "Should load non-blocklisted image");
+
+  var elt = win.document.getElementById("styleCheck");
+  var style = win.document.defaultView.getComputedStyle(elt);
+  isnot(style.visibility, aBlocked ? "hidden" : "", "Should not load tracking css");
+
+  is(win.document.blockedNodeByClassifierCount, aBlocked ? badids.length : 0, "Should identify all tracking elements");
+
+  var blockedNodes = win.document.blockedNodesByClassifier;
+
+  // Make sure that every node in blockedNodes exists in the tree
+  // (that may not always be the case but do not expect any nodes to disappear
+  // from the tree here)
+  var allNodeMatch = true;
+  for (let i = 0; i < blockedNodes.length; i++) {
+    let nodeMatch = false;
+    for (let j = 0; j < badids.length && !nodeMatch; j++) {
+      nodeMatch = nodeMatch ||
+        (blockedNodes[i] == win.document.getElementById(badids[j]));
+    }
+
+    allNodeMatch = allNodeMatch && nodeMatch;
+  }
+  is(allNodeMatch, true, "All annotated nodes are expected in the tree");
+
+  // Make sure that every node with a badid (see badids) is found in the
+  // blockedNodes. This tells us if we are neglecting to annotate
+  // some nodes
+  allNodeMatch = true;
+  for (let j = 0; j < badids.length; j++) {
+    let nodeMatch = false;
+    for (let i = 0; i < blockedNodes.length && !nodeMatch; i++) {
+      nodeMatch = nodeMatch ||
+        (blockedNodes[i] == win.document.getElementById(badids[j]));
+    }
+
+    allNodeMatch = allNodeMatch && nodeMatch;
+  }
+  is(allNodeMatch, aBlocked, "All tracking nodes are expected to be annotated as such");
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.pbmode.enabled", true],
+ ["privacy.trackingprotection.testing.report_blocked_node", true],
+ ]}, test);
+
+async function test() {
+ SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ // Normal mode, with the pref (trackers should be loaded)
+ await testOnWindow(false).then(function(aWindow) {
+ checkLoads(aWindow, false);
+ aWindow.close();
+ });
+
+ // Private Browsing, with the pref (trackers should be blocked)
+ await testOnWindow(true).then(function(aWindow) {
+ checkLoads(aWindow, true);
+ aWindow.close();
+ });
+
+ // Private Browsing, without the pref (trackers should be loaded)
+ await SpecialPowers.setBoolPref("privacy.trackingprotection.pbmode.enabled", false);
+ await testOnWindow(true).then(function(aWindow) {
+ checkLoads(aWindow, false);
+ aWindow.close();
+ });
+
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_reporturl.html b/toolkit/components/url-classifier/tests/mochitest/test_reporturl.html
new file mode 100644
index 0000000000..36d128cdf7
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_reporturl.html
@@ -0,0 +1,217 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test report matched URL info (Bug #1288633)</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+;
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+var mainWindow = window.browsingContext.topChromeWindow;
+const SJS = "mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs";
+const BASE_URL = "http://" + SJS + "?";
+
+var pushPrefs = (...p) => SpecialPowers.pushPrefEnv({set: p});
+
+function addUrlToDB(list, url) {
+ let testData = [{ db: list, url}];
+
+ return classifierHelper.addUrlToDB(testData)
+ .catch(function(err) {
+ ok(false, "Couldn't update classifier. Error code: " + err);
+ // Abort test.
+ SimpleTest.finish();
+ });
+}
+
+// Register |data.list| with its provider's "lists" pref and the active
+// phishing tables, then insert |data.testUrl| into the local database.
+// Resolves once all pref changes and DB updates have been applied.
+function setupTestData(data) {
+  let promises = [];
+  // Append to the provider's list pref, or create it if it has no user value.
+  let providerList = "browser.safebrowsing.provider." + data.provider + ".lists";
+  if (!Services.prefs.prefHasUserValue(providerList)) {
+    promises.push(pushPrefs([providerList, data.list]));
+  } else {
+    let pref = SpecialPowers.getCharPref(providerList);
+    pref += "," + data.list;
+    promises.push(pushPrefs([providerList, pref]));
+  }
+
+  // Activate the list for phishing classification.
+  let activeTablePref = "urlclassifier.phishTable";
+  let activeTable = SpecialPowers.getCharPref(activeTablePref);
+  activeTable += "," + data.list;
+  promises.push(pushPrefs([activeTablePref, activeTable]));
+
+  promises.push(addUrlToDB(data.list, data.testUrl));
+  return Promise.all(promises);
+}
+
+// Open a fresh browser window, let |aTestCreater| drive it into the blocked
+// state, then wait for the "blocked-badware-page" notification bar. For the
+// google/google4 providers, click the report button and pass the resulting
+// report tab's browser to |aCallback|; other providers must show no button.
+function testOnWindow(aTestData, aCallback, aTestCreater) {
+  return new Promise(resolve => {
+    let win = mainWindow.OpenBrowserWindow();
+
+    (async function() {
+      await TestUtils.topicObserved("browser-delayed-startup-finished",
+        subject => subject == win);
+
+      let browser = win.gBrowser.selectedBrowser;
+      aTestCreater(win, browser, aTestData.topUrl, aTestData.testUrl);
+
+      let notification = await BrowserTestUtils.waitForNotificationBar(win.gBrowser, browser, "blocked-badware-page");
+      ok(notification, "Notification box should be displayed");
+
+      // buttons[1] is the report button; it only exists for google providers.
+      let buttons = notification.buttonContainer.getElementsByTagName("button");
+      let button = buttons[1];
+      if (aTestData.provider != "google" && aTestData.provider != "google4") {
+        is(button, undefined, "Report button should not be showed");
+        win.close();
+        resolve();
+        return;
+      }
+
+      button.click();
+
+      // Clicking the report button opens the report page in a new tab.
+      let newTabBrowser = win.gBrowser.selectedTab.linkedBrowser;
+      await BrowserTestUtils.browserLoaded(newTabBrowser);
+
+      aCallback(newTabBrowser);
+      win.close();
+      resolve();
+    })();
+  });
+}
+
+// Navigate |aBrowser| to the top page, point its #phishingFrame at the
+// blocked URL, and once about:blocked has loaded inside the frame, click
+// the "ignore warning" link. The async IIFE is intentionally not awaited:
+// completion is observed by testOnWindow via the notification bar.
+var createBlockedIframe = function(aWindow, aBrowser, aTopUrl, aUrl) {
+  (async function() {
+    BrowserTestUtils.startLoadingURIString(aBrowser, aTopUrl);
+    await BrowserTestUtils.browserLoaded(aBrowser);
+
+    await SpecialPowers.spawn(aBrowser, [aUrl], async function(url) {
+      return new Promise(resolve => {
+        // Resolve when the blocked page has finished loading in the frame.
+        let listener = e => {
+          docShell.chromeEventHandler.removeEventListener("AboutBlockedLoaded", listener, false, true);
+          resolve();
+        };
+        docShell.chromeEventHandler.addEventListener("AboutBlockedLoaded", listener, false, true);
+        let frame = content.document.getElementById("phishingFrame");
+        frame.setAttribute("src", "http://" + url);
+      });
+    });
+
+    let doc = aWindow.gBrowser.contentDocument.getElementsByTagName("iframe")[0].contentDocument;
+    let ignoreWarningLink = doc.getElementById("ignore_warning_link");
+    ok(ignoreWarningLink, "Ignore warning link should exist");
+    ignoreWarningLink.click();
+  })();
+};
+
+// Navigate |aBrowser| straight to the blocked top-level URL and click the
+// "ignore warning" link on the resulting about:blocked page. The async IIFE
+// is intentionally not awaited; see createBlockedIframe.
+var createBlockedPage = function(aWindow, aBrowser, aTopUrl, aUrl) {
+  (async function() {
+    BrowserTestUtils.startLoadingURIString(aBrowser, aTopUrl);
+    await BrowserTestUtils.waitForContentEvent(aBrowser, "DOMContentLoaded");
+
+    let doc = aWindow.gBrowser.contentDocument;
+    let ignoreWarningLink = doc.getElementById("ignore_warning_link");
+    ok(ignoreWarningLink, "Ignore warning link should exist");
+    ignoreWarningLink.click();
+  })();
+};
+
+function checkReportURL(aReportBrowser, aUrl) {
+ let expectedReportUrl = BASE_URL + "action=reporturl&reporturl=" + encodeURIComponent(aUrl);
+ is(aReportBrowser.contentDocument.location.href, expectedReportUrl, "Correct report URL");
+}
+
+var testDatas = [
+ { topUrl: "http://itisaphishingsite.org/phishing.html",
+ testUrl: "itisaphishingsite.org/phishing.html",
+ list: "mochi1-phish-simple",
+ provider: "google",
+ blockCreater: createBlockedPage,
+ expectedReportUri: "http://itisaphishingsite.org/phishing.html",
+ },
+
+  // Non-google provider, no report button is shown.
+ // Test provider needs a valid update URL (mozilla for example) otherwise
+ // the updates inserting the test data will fail.
+ { topUrl: "http://fakeitisaphishingsite.org/phishing.html",
+ testUrl: "fakeitisaphishingsite.org/phishing.html",
+ list: "fake-phish-simple",
+ provider: "mozilla",
+ blockCreater: createBlockedPage,
+ },
+
+ // Iframe case:
+ // A top level page at
+ // http://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-iframe
+ // contains an iframe to http://phishing.example.com/test.html (blocked).
+
+ { topUrl: "http://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-iframe",
+ testUrl: "phishing.example.com/test.html",
+ list: "mochi2-phish-simple",
+ provider: "google4",
+ blockCreater: createBlockedIframe,
+ expectedReportUri: "http://phishing.example.com/test.html",
+ },
+
+ // Redirect case:
+ // A top level page at
+ // http://prefixexample.com/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-redirect (blocked)
+ // will get redirected to
+ // https://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-redirect.
+ { topUrl: "http://prefixexample.com/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-redirect",
+ testUrl: "prefixexample.com/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs?action=create-blocked-redirect",
+ list: "mochi3-phish-simple",
+ provider: "google4",
+ blockCreater: createBlockedPage,
+ expectedReportUri: "http://prefixexample.com/chrome/toolkit/components/url-classifier/tests/mochitest/report.sjs",
+ },
+
+];
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["browser.safebrowsing.provider.google.reportPhishMistakeURL", BASE_URL + "action=reporturl&reporturl="],
+ ["browser.safebrowsing.provider.google4.reportPhishMistakeURL", BASE_URL + "action=reporturl&reporturl="],
+ ["browser.safebrowsing.phishing.enabled", true]]},
+ test);
+
+function test() {
+ (async function() {
+ await classifierHelper.waitForInit();
+
+ for (let testData of testDatas) {
+ await setupTestData(testData);
+ await testOnWindow(testData, function(browser) {
+ checkReportURL(browser, testData.expectedReportUri);
+ }, testData.blockCreater);
+
+ await classifierHelper._cleanup();
+ }
+
+ SimpleTest.finish();
+ })();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html b/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html
new file mode 100644
index 0000000000..475145591d
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_safebrowsing_bug1272239.html
@@ -0,0 +1,91 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Bug 1272239 - Only tables with provider could register gethash url in listmanager.</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var Cc = SpecialPowers.Cc;
+var Ci = SpecialPowers.Ci;
+
+// List all the tables
+const prefs = [
+  "urlclassifier.phishTable",
+  "urlclassifier.malwareTable",
+  "urlclassifier.downloadBlockTable",
+  "urlclassifier.downloadAllowTable",
+  "urlclassifier.trackingTable",
+  "urlclassifier.trackingWhitelistTable",
+  "urlclassifier.blockedTable",
+];
+
+// Collect provider names from the browser.safebrowsing.provider.* branch;
+// the provider is the first component of each child pref name.
+var providers = {};
+
+var branch = SpecialPowers.Services.prefs.getBranch("browser.safebrowsing.provider.");
+var children = branch.getChildList("");
+
+for (var child of children) {
+  var prefComponents = child.split(".");
+  var providerName = prefComponents[0];
+  providers[providerName] = {};
+}
+
+// Get lists from |browser.safebrowsing.provider.PROVIDER_NAME.lists| preference.
+// listsToProvider[i] holds the provider of listsWithProvider[i].
+var listsWithProvider = [];
+var listsToProvider = [];
+for (let provider in providers) {
+  let pref = "browser.safebrowsing.provider." + provider + ".lists";
+  let list = SpecialPowers.getCharPref(pref).split(",");
+
+  listsToProvider = listsToProvider.concat(list.map( () => { return provider; }));
+  listsWithProvider = listsWithProvider.concat(list);
+}
+
+// Get all the lists
+var lists = [];
+for (let pref of prefs) {
+  lists = lists.concat(SpecialPowers.getCharPref(pref).split(","));
+}
+
+var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
+                  getService(Ci.nsIUrlListManager);
+
+let googleKey = SpecialPowers.Services.urlFormatter.formatURL("%GOOGLE_SAFEBROWSING_API_KEY%").trim();
+
+for (let list of lists) {
+  if (!list)
+    continue;
+
+  // Lists with a provider should have the provider's gethash url.
+  // Lists without a provider (for example, moztest-malware-simple) should not
+  // have a gethash url.
+  var url = listmanager.getGethashUrl(list);
+  var index = listsWithProvider.indexOf(list);
+  if (index >= 0) {
+    let provider = listsToProvider[index];
+    let pref = "browser.safebrowsing.provider." + provider + ".gethashURL";
+    // Google providers are skipped entirely when no API key is configured.
+    if ((provider == "google" || provider == "google4") &&
+        (!googleKey || googleKey == "no-google-safebrowsing-api-key")) {
+      is(url, "", "getHash url of " + list + " should be empty");
+    } else {
+      is(url, SpecialPowers.getCharPref(pref), list + " matches its gethash url");
+    }
+  } else {
+    is(url, "", list + " should not have a gethash url");
+  }
+}
+
+</script>
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_socialtracking.html b/toolkit/components/url-classifier/tests/mochitest/test_socialtracking.html
new file mode 100644
index 0000000000..ef1114686f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_socialtracking.html
@@ -0,0 +1,109 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the socialtracking classifier</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+var tests = [
+ // All disabled.
+ { config: [ false, false ], loadExpected: true },
+
+ // Just entitylisted.
+ { config: [ false, true ], loadExpected: true },
+
+ // Just blocklisted.
+ { config: [ true, false ], loadExpected: false },
+
+ // entitylist + blocklist: entitylist wins
+ { config: [ true, true ], loadExpected: true },
+];
+
+function prefValue(value, what) {
+ return value ? what : "";
+}
+
+async function runTest(test) {
+ await SpecialPowers.pushPrefEnv({set: [
+ [ "urlclassifier.features.socialtracking.blacklistHosts", prefValue(test.config[0], "example.com") ],
+ [ "urlclassifier.features.socialtracking.whitelistHosts", prefValue(test.config[1], "mochi.test,mochi.xorigin-test") ],
+ [ "urlclassifier.features.socialtracking.blacklistTables", prefValue(test.config[0], "mochitest1-track-simple") ],
+ [ "urlclassifier.features.socialtracking.whitelistTables", "" ],
+ [ "privacy.trackingprotection.enabled", false ],
+ [ "privacy.trackingprotection.annotate_channels", false ],
+ [ "privacy.trackingprotection.cryptomining.enabled", false ],
+ [ "privacy.trackingprotection.emailtracking.enabled", false ],
+ [ "privacy.trackingprotection.fingerprinting.enabled", false ],
+ [ "privacy.trackingprotection.socialtracking.enabled", true ],
+ ]});
+
+ info("Testing: " + JSON.stringify(test.config) + "\n");
+
+ // Let's load an image with a random query string, just to avoid network cache.
+ let result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.loadExpected, "The loading happened correctly");
+
+ // Let's load an image with a random query string, just to avoid network cache.
+ result = await new Promise(resolve => {
+ let image = new Image();
+ image.src = "http://tracking.example.org/tests/toolkit/components/url-classifier/tests/mochitest/raptor.jpg?" + Math.random();
+ image.onload = _ => resolve(true);
+ image.onerror = _ => resolve(false);
+ });
+
+ is(result, test.loadExpected, "The loading happened correctly (by table)");
+}
+
+async function runTests() {
+ let chromeScript = SpecialPowers.loadChromeScript(_ => {
+ /* eslint-env mozilla/chrome-script */
+ const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+ );
+
+ addMessageListener("loadTrackers", __ => {
+ UrlClassifierTestUtils.addTestTrackers().then(___ => {
+ sendAsyncMessage("trackersLoaded");
+ });
+ });
+
+ addMessageListener("unloadTrackers", __ => {
+ UrlClassifierTestUtils.cleanupTestTrackers();
+ sendAsyncMessage("trackersUnloaded");
+ });
+ });
+
+ await new Promise(resolve => {
+ chromeScript.addMessageListener("trackersLoaded", resolve);
+ chromeScript.sendAsyncMessage("loadTrackers");
+ });
+
+ for (let test in tests) {
+ await runTest(tests[test]);
+ }
+
+ await new Promise(resolve => {
+ chromeScript.addMessageListener("trackersUnloaded", resolve);
+ chromeScript.sendAsyncMessage("unloadTrackers");
+ });
+
+ chromeScript.destroy();
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+runTests();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_socialtracking_annotate.html b/toolkit/components/url-classifier/tests/mochitest/test_socialtracking_annotate.html
new file mode 100644
index 0000000000..f12dcf11ec
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_socialtracking_annotate.html
@@ -0,0 +1,32 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>Test the relationship between annotation vs blocking - socialtracking</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="features.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+
+<body>
+<script class="testbody" type="text/javascript">
+
+runTests(SpecialPowers.Ci.nsIClassifiedChannel.CLASSIFIED_SOCIALTRACKING,
+ [
+ ["privacy.socialtracking.block_cookies.enabled", false],
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.annotate_channels", false],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.fingerprinting.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.fingerprinting.enabled", false],
+ ["urlclassifier.features.cryptomining.annotate.blacklistHosts", ""],
+ ["urlclassifier.features.cryptomining.annotate.blacklistTables", ""],
+ ["privacy.trackingprotection.cryptomining.enabled", false],
+ ["privacy.trackingprotection.socialtracking.enabled", true],
+ ["privacy.trackingprotection.emailtracking.enabled", false],
+ ],
+ false /* a tracking resource */);
+SimpleTest.waitForExplicitFinish();
+
+</script>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_threathit_report.html b/toolkit/components/url-classifier/tests/mochitest/test_threathit_report.html
new file mode 100644
index 0000000000..341d26fa3f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_threathit_report.html
@@ -0,0 +1,235 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test threathit report</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script src="head.js"></script>
+<script class="testbody" type="text/javascript">
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+
+var mainWindow = window.browsingContext.topChromeWindow;
+
+var listmanager = Cc["@mozilla.org/url-classifier/listmanager;1"].
+ getService(Ci.nsIUrlListManager);
+const SJS = "mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/threathit.sjs";
+
+function hash(str) {
+ let hasher = Cc["@mozilla.org/security/hash;1"]
+ .createInstance(Ci.nsICryptoHash);
+
+ let data = new TextEncoder().encode(str);
+ hasher.init(hasher.SHA256);
+ hasher.update(data, data.length);
+
+ return hasher.finish(false);
+}
+
+var testDatas = [
+ { url: "itisaphishingsite1.org/phishing.html",
+ list: "test-phish-proto",
+ provider: "test",
+ // The base64 of binary protobuf representation of response:
+ //
+ // [
+ // {
+ // 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
+ // 'response_type': 2, // FULL_UPDATE
+ // 'new_client_state': 'sta\x0te', // NEW_CLIENT_STATE
+ // 'additions': { 'compression_type': RAW,
+ // 'prefix_size': 1,
+ // 'raw_hashes': "xxxx"} // hash prefix of url itisaphishingsite1.org/phishing.html
+ // 'minimumWaitDuration': "8.1s",
+ // }
+ // ]
+ //
+ updateProtobuf: "ChoIAiACKgwIARIICAQSBM9UdYs6BnN0YQB0ZRIECAwQCg==",
+ // The base64 of binary protobuf representation of response:
+ // {
+ // "matches": [
+ // {
+ // "threat_type": 2, // SOCIAL_ENGINEERING_PUBLIC
+ // "threat": {
+ // "hash": string,
+ // },
+ // "cacheDuration": "8.1",
+ // }
+ // ],
+ // "minimumWaitDuration": 12.0..1,
+ // "negativeCacheDuration": 12.0..1,
+ // }
+ fullhashProtobuf: "CiwIAhoiCiDPVHWLptJSc/UYiabk1/wo5OkJqbggiylVKISK28bfeSoECAwQChIECAwQChoECAwQCg==",
+ },
+];
+
+function addDataV4ToServer(list, type, data) {
+ return new Promise(function(resolve, reject) {
+ var xhr = new XMLHttpRequest;
+ let params = new URLSearchParams();
+ params.append("action", "store");
+ params.append("list", list);
+ params.append("type", type);
+ params.append("data", data);
+
+ xhr.open("PUT", "http://" + SJS + "?" + params.toString(), true);
+ xhr.setRequestHeader("Content-Type", "text/plain");
+ xhr.onreadystatechange = function() {
+ if (this.readyState == this.DONE) {
+ resolve();
+ }
+ };
+ xhr.send();
+ });
+}
+/**
+ * Grabs the results via XHR
+ */
+function checkResults(aTestdata, aExpected) {
+ let xhr = new XMLHttpRequest();
+ xhr.responseType = "text";
+ xhr.onload = function() {
+ is(aExpected, xhr.response, "Correct report request");
+ SimpleTest.finish();
+ };
+ xhr.onerror = function() {
+ ok(false, "Can't get results from server.");
+ SimpleTest.finish();
+ };
+ let params = new URLSearchParams();
+ params.append("action", "getreport");
+ params.append("list", aTestdata.list);
+ let url = "http://" + SJS + "?" + params.toString();
+
+ xhr.open("GET", url, true);
+ xhr.setRequestHeader("Content-Type", "text/plain");
+ xhr.send();
+}
+
+function waitForUpdate(data) {
+ listmanager.checkForUpdates(data.updateUrl);
+ return new Promise(resolve => {
+ Services.obs.addObserver(function observer(aSubject, aTopic) {
+ Services.obs.removeObserver(observer, aTopic);
+ resolve();
+ }, "safebrowsing-update-finished");
+ });
+}
+
+function addUpdateDataV4ToServer(list, data) {
+ return addDataV4ToServer(list, "update", data);
+}
+
+function addFullhashV4DataToServer(list, data) {
+ return addDataV4ToServer(list, "fullhash", data);
+}
+
+function setupTestData(data) {
+ let updateParams = new URLSearchParams();
+ updateParams.append("action", "get");
+ updateParams.append("list", data.list);
+ updateParams.append("type", "update");
+ data.updateUrl = "http://" + SJS + "?" + updateParams.toString();
+
+ let gethashParams = new URLSearchParams();
+ gethashParams.append("action", "get");
+ gethashParams.append("list", data.list);
+ gethashParams.append("type", "fullhash");
+ data.gethashUrl = "http://" + SJS + "?" + gethashParams.toString();
+
+ listmanager.registerTable(data.list,
+ data.provider,
+ data.updateUrl,
+ data.gethashUrl);
+
+ let promises = [];
+ let activeTablePref = "urlclassifier.phishTable";
+ let activeTable = SpecialPowers.getCharPref(activeTablePref);
+ activeTable += "," + data.list;
+
+ let reportPref = "browser.safebrowsing.provider." + data.provider + ".dataSharingURL";
+ let reportParams = new URLSearchParams();
+ reportParams.append("action", "report");
+ reportParams.append("list", data.list);
+ data.reportUrl = "http://" + SJS + "?" + reportParams.toString();
+
+ let reportEnabledPref = "browser.safebrowsing.provider." + data.provider + ".dataSharing.enabled";
+
+ promises.push(pushPrefs([reportPref, data.reportUrl]));
+ promises.push(pushPrefs([reportEnabledPref, true]));
+ promises.push(pushPrefs([activeTablePref, activeTable]));
+ promises.push(addUpdateDataV4ToServer(data.list, data.updateProtobuf));
+ promises.push(addFullhashV4DataToServer(data.list, data.fullhashProtobuf));
+ return Promise.all(promises);
+}
+
+function testOnWindow(aTestData) {
+ return new Promise(resolve => {
+ let win = mainWindow.OpenBrowserWindow();
+
+ (async function() {
+ await new Promise(rs => whenDelayedStartupFinished(win, rs));
+
+ let expected;
+ let browser = win.gBrowser.selectedBrowser;
+ let progressListener = {
+ onContentBlockingEvent(aWebProgress, aRequest, aEvent) {
+ expected = aTestData.reportUrl;
+ },
+ QueryInterface: ChromeUtils.generateQI(["nsISupportsWeakReference"]),
+ };
+ win.gBrowser.addProgressListener(progressListener, Ci.nsIWebProgress.NOTIFY_CONTENT_BLOCKING);
+
+ BrowserTestUtils.startLoadingURIString(browser, aTestData.url);
+ await BrowserTestUtils.browserLoaded(
+ browser,
+ false,
+ `http://${aTestData.url}`,
+ true
+ );
+ checkResults(aTestData, expected);
+ win.close();
+ resolve();
+ })();
+ });
+}
+SpecialPowers.pushPrefEnv(
+ {"set": [
+ ["browser.safebrowsing.phishing.enabled", true],
+ ["dom.testing.sync-content-blocking-notifications", true],
+ ]},
+ test);
+
+function test() {
+ (async function() {
+ await classifierHelper.waitForInit();
+
+ for (let testData of testDatas) {
+ await setupTestData(testData);
+ await waitForUpdate(testData);
+ await testOnWindow(testData);
+ await classifierHelper._cleanup();
+ }
+ })();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html
new file mode 100644
index 0000000000..098d6098d9
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1157081.html
@@ -0,0 +1,100 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test Tracking Protection with and without Safe Browsing (Bug #1157081)</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+var contentPage = "http://mochi.test:8888/chrome/toolkit/components/url-classifier/tests/mochitest/classifiedAnnotatedPBFrame.html";
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+function testOnWindow(aCallback) {
+ var win = mainWindow.OpenBrowserWindow();
+ win.addEventListener("load", function() {
+ TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win).then(() => {
+ win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+ if (win.content.location.href != contentPage) {
+ win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+ triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+ });
+ return;
+ }
+ win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+ win.content.addEventListener("load", function innerLoad2() {
+ win.content.removeEventListener("load", innerLoad2);
+ SimpleTest.executeSoon(function() { aCallback(win); });
+ }, false, true);
+ }, true);
+ SimpleTest.executeSoon(function() {
+ win.gBrowser.loadURI(Services.io.newURI(contentPage), {
+ triggeringPrincipal: Services.scriptSecurityManager.createNullPrincipal({}),
+ });
+ });
+ });
+ }, {capture: true, once: true});
+}
+
+var badids = [
+ "badscript",
+];
+
+function checkLoads(aWindow, aBlocked) {
+ var win = aWindow.content;
+ is(win.document.getElementById("badscript").dataset.touched, aBlocked ? "no" : "yes", "Should not load tracking javascript");
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["urlclassifier.trackingTable", "moztest-track-simple"],
+ ["privacy.trackingprotection.enabled", true],
+ ["browser.safebrowsing.malware.enabled", false],
+ ["browser.safebrowsing.phishing.enabled", false],
+ ["channelclassifier.allowlist_example", true]]},
+ test);
+
+function test() {
+ SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
+ UrlClassifierTestUtils.addTestTrackers().then(() => {
+ // Safe Browsing turned OFF, tracking protection should work nevertheless
+ testOnWindow(function(aWindow) {
+ checkLoads(aWindow, true);
+ aWindow.close();
+
+ // Safe Browsing turned ON, tracking protection should still work
+ SpecialPowers.setBoolPref("browser.safebrowsing.phishing.enabled", true);
+ testOnWindow(function(aWindow1) {
+ checkLoads(aWindow1, true);
+ aWindow1.close();
+ SimpleTest.finish();
+ });
+ });
+ });
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1312515.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1312515.html
new file mode 100644
index 0000000000..31c69ac293
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1312515.html
@@ -0,0 +1,164 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test Bug 1312515</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+
+<p><b>To see more of what is happening: <code>export MOZ_LOG=nsChannelClassifier:3</code></b></p>
+
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+var contentPage = "http://www.itisatrap.org/chrome/toolkit/components/url-classifier/tests/mochitest/trackingRequest.html";
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+function testOnWindow(aPrivate, aCallback) {
+ var win = mainWindow.OpenBrowserWindow({private: aPrivate});
+ win.addEventListener("load", function() {
+ TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win).then(() => {
+ win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+ if (win.content.location.href != contentPage) {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ return;
+ }
+ win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+ win.content.addEventListener("load", function innerLoad2() {
+ win.content.removeEventListener("load", innerLoad2);
+ SimpleTest.executeSoon(function() { aCallback(win); });
+ }, false, true);
+ }, true);
+ SimpleTest.executeSoon(function () {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ });
+ });
+ }, {capture: true, once: true});
+}
+
+const topic = "http-on-before-connect";
+var testUrl;
+var testWindow;
+var resolve;
+
+function checkLowestPriority(aSubject) {
+ checkPriority(aSubject, checkLowestPriority, Ci.nsISupportsPriority.PRIORITY_LOWEST, "Priority should be lowest.");
+}
+
+function checkNormalPriority(aSubject) {
+ checkPriority(aSubject, checkNormalPriority, Ci.nsISupportsPriority.PRIORITY_NORMAL, "Priority should be normal.");
+}
+
+function checkPriority(aSubject, aCallback, aPriority, aMessage) {
+ var channel = aSubject.QueryInterface(Ci.nsIChannel);
+ info("Channel classified: " + channel.name);
+ if (channel.name !== testUrl) {
+ return;
+ }
+
+ SpecialPowers.removeObserver(aCallback, topic);
+
+ var p = aSubject.QueryInterface(Ci.nsISupportsPriority);
+ is(p.priority, aPriority, aMessage);
+
+ info("Resolving promise for " + channel.name);
+ resolve();
+}
+
+function testXHR1() {
+ return new Promise(function(aResolve, aReject) {
+ testUrl = "http://tracking.example.com/";
+ info("Not blocklisted: " + testUrl);
+ resolve = aResolve;
+ SpecialPowers.addObserver(checkNormalPriority, topic);
+ testWindow.content.postMessage({type: "doXHR", url: testUrl}, "*");
+ });
+}
+
+function testXHR2() {
+ return new Promise(function(aResolve, aReject) {
+ testUrl = "http://trackertest.org/";
+ info("Blocklisted and not entitylisted: " + testUrl);
+ resolve = aResolve;
+ SpecialPowers.addObserver(checkLowestPriority, topic);
+ testWindow.content.postMessage({type: "doXHR", url: testUrl}, "*");
+ });
+}
+
+function testFetch1() {
+ return new Promise(function(aResolve, aReject) {
+ testUrl = "http://itisatracker.org/"; // only entitylisted in TP, not for annotations
+ info("Blocklisted and not entitylisted: " + testUrl);
+ resolve = aResolve;
+ SpecialPowers.addObserver(checkLowestPriority, topic);
+ testWindow.content.postMessage({type: "doFetch", url: testUrl}, "*");
+ });
+}
+
+function testFetch2() {
+ return new Promise(function(aResolve, aReject) {
+ testUrl = "http://tracking.example.org/"; // only entitylisted for annotations, not in TP
+ info("Blocklisted but also entitylisted: " + testUrl);
+ resolve = aResolve;
+ SpecialPowers.addObserver(checkNormalPriority, topic);
+ testWindow.content.postMessage({type: "doFetch", url: testUrl}, "*");
+ });
+}
+
+function endTest() {
+ info("Finishing up...");
+ testWindow.close();
+ testWindow = null;
+ SimpleTest.finish();
+}
+
+async function test() {
+ await SpecialPowers.pushPrefEnv(
+ {"set": [["network.http.tailing.enabled", false],
+ ["privacy.trackingprotection.enabled", false],
+ ["privacy.trackingprotection.annotate_channels", true],
+ ["privacy.trackingprotection.lower_network_priority", true],
+ ["dom.security.https_first", false]]});
+ await UrlClassifierTestUtils.addTestTrackers();
+ testOnWindow(false, async function(aWindow) {
+ testWindow = aWindow;
+ await testXHR1();
+ await testXHR2();
+ await testFetch1();
+ await testFetch2();
+ await endTest();
+ });
+}
+
+SimpleTest.waitForExplicitFinish();
+SimpleTest.registerCleanupFunction(function() {
+ info("Cleaning up prefs...");
+ UrlClassifierTestUtils.cleanupTestTrackers();
+});
+test();
+
+</script>
+
+</pre>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1580416.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1580416.html
new file mode 100644
index 0000000000..75d69b2596
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_bug1580416.html
@@ -0,0 +1,102 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test Tracking Protection in Private Browsing mode</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="classifierHelper.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.window.browsingContext.topChromeWindow;
+var contentPage1 = "http://www.example.com/chrome/toolkit/components/url-classifier/tests/mochitest/bug_1580416.html";
+
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+function testOnWindow(contentPage) {
+ return new Promise((resolve, reject) => {
+ var win = mainWindow.OpenBrowserWindow();
+ win.addEventListener("load", function() {
+ TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win).then(() => {
+ win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+ if (win.content.location.href != contentPage) {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ return;
+ }
+ win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+ win.content.addEventListener("load", function innerLoad2() {
+ win.content.removeEventListener("load", innerLoad2);
+ SimpleTest.executeSoon(function() {
+ resolve(win);
+ });
+ }, false, true);
+ }, true);
+ SimpleTest.executeSoon(function() {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ });
+ });
+ }, {capture: true, once: true});
+ });
+}
+
+var testData = [
+ { url: "apps.fbsbx.com/",
+ db: "test-track-simple",
+ },
+ { url: "www.example.com/?resource=apps.fbsbx.com",
+ db: "test-trackwhite-simple",
+ },
+];
+
+function checkLoads(aWindow, aWhitelisted) {
+ var win = aWindow.content;
+
+ is(win.document.getElementById("goodscript").dataset.touched, aWhitelisted ? "yes" : "no", "Should load whitelisted tracking javascript");
+}
+
+SpecialPowers.pushPrefEnv(
+ // Disable STS preloadlist because apps.fbsbx.com is in the list.
+ {"set": [["privacy.trackingprotection.enabled", true],
+ ["urlclassifier.trackingTable", "test-track-simple"],
+ ["urlclassifier.trackingWhitelistTable", "test-trackwhite-simple"],
+ ["dom.security.https_first", false],
+ ["network.stricttransportsecurity.preloadlist", false]]},
+ test);
+
+async function test() {
+ await classifierHelper.waitForInit();
+ await classifierHelper.addUrlToDB(testData);
+
+ // Load the test from a URL on the whitelist
+ await testOnWindow(contentPage1).then(function(aWindow) {
+ checkLoads(aWindow, true);
+ aWindow.close();
+ });
+
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html
new file mode 100644
index 0000000000..69ca337c33
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/test_trackingprotection_whitelist.html
@@ -0,0 +1,165 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+ <title>Test Tracking Protection in Private Browsing mode</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="chrome://mochikit/content/tests/SimpleTest/test.css">
+</head>
+
+<body>
+<p id="display"></p>
+<div id="content" style="display: none">
+</div>
+<pre id="test">
+
+<script class="testbody" type="text/javascript">
+
+var mainWindow = window.browsingContext.topChromeWindow;
+var contentPage1 = "http://www.itisatrap.org/tests/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html";
+var contentPage2 = "http://example.com/tests/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html";
+
+const {UrlClassifierTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+const {BrowserTestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/BrowserTestUtils.sys.mjs"
+);
+const {TestUtils} = ChromeUtils.importESModule(
+ "resource://testing-common/TestUtils.sys.mjs"
+);
+
+function testOnWindow(contentPage) {
+ return new Promise((resolve, reject) => {
+ var win = mainWindow.OpenBrowserWindow();
+ win.addEventListener("load", function() {
+ TestUtils.topicObserved("browser-delayed-startup-finished",
+ subject => subject == win).then(() => {
+ win.addEventListener("DOMContentLoaded", function onInnerLoad() {
+ if (win.content.location.href != contentPage) {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ return;
+ }
+ win.removeEventListener("DOMContentLoaded", onInnerLoad, true);
+
+ win.content.addEventListener("load", function innerLoad2() {
+ win.content.removeEventListener("load", innerLoad2);
+ SimpleTest.executeSoon(function() {
+ resolve(win);
+ });
+ }, false, true);
+ }, true);
+ SimpleTest.executeSoon(function() {
+ BrowserTestUtils.startLoadingURIString(win.gBrowser, contentPage);
+ });
+ });
+ }, {capture: true, once: true});
+ });
+}
+
+var alwaysbadids = [
+ "badscript",
+];
+
+function checkLoads(aWindow, aWhitelisted, tpEnabled) {
+ var win = aWindow.content;
+ if (!tpEnabled) {
+ is(win.document.getElementById("badscript").dataset.touched, "yes", "Should load tracking javascript");
+ is(win.document.blockedNodeByClassifierCount, 0, "Should not identify any tracking elements");
+ return;
+ }
+
+ is(win.document.getElementById("badscript").dataset.touched, "no", "Should not load tracking javascript");
+ is(win.document.getElementById("goodscript").dataset.touched, aWhitelisted ? "yes" : "no", "Should load whitelisted tracking javascript");
+
+ var badids = alwaysbadids.slice();
+ if (!aWhitelisted) {
+ badids.push("goodscript");
+ }
+ is(win.document.blockedNodeByClassifierCount, badids.length, "Should identify all tracking elements");
+
+ var blockedNodes = win.document.blockedNodesByClassifier;
+
+ // Make sure that every node in blockedNodes exists in the tree
+ // (that may not always be the case but do not expect any nodes to disappear
+ // from the tree here)
+ var allNodeMatch = true;
+ for (let i = 0; i < blockedNodes.length; i++) {
+ let nodeMatch = false;
+ for (let j = 0; j < badids.length && !nodeMatch; j++) {
+ nodeMatch = nodeMatch ||
+ (blockedNodes[i] == win.document.getElementById(badids[j]));
+ }
+
+ allNodeMatch = allNodeMatch && nodeMatch;
+ }
+ is(allNodeMatch, true, "All annotated nodes are expected in the tree");
+
+ // Make sure that every node with a badid (see badids) is found in the
+ // blockedNodes. This tells us if we are neglecting to annotate
+ // some nodes
+ allNodeMatch = true;
+ for (let j = 0; j < badids.length; j++) {
+ let nodeMatch = false;
+ for (let i = 0; i < blockedNodes.length && !nodeMatch; i++) {
+ nodeMatch = nodeMatch ||
+ (blockedNodes[i] == win.document.getElementById(badids[j]));
+ }
+
+ allNodeMatch = allNodeMatch && nodeMatch;
+ }
+ is(allNodeMatch, true, "All tracking nodes are expected to be annotated as such");
+}
+
+SpecialPowers.pushPrefEnv(
+ {"set": [["privacy.trackingprotection.enabled", true],
+ ["privacy.trackingprotection.testing.report_blocked_node", true],
+ ["dom.security.https_first", false],
+ ["channelclassifier.allowlist_example", true]]},
+ test);
+
+async function test() {
+ SimpleTest.registerCleanupFunction(UrlClassifierTestUtils.cleanupTestTrackers);
+ await UrlClassifierTestUtils.addTestTrackers();
+
+ // Load the test from a URL that's NOT on the whitelist with tracking protection disabled
+ await SpecialPowers.setBoolPref("privacy.trackingprotection.enabled", false);
+ await testOnWindow(contentPage2).then(function(aWindow) {
+ checkLoads(aWindow, false, false);
+ aWindow.close();
+ });
+ await SpecialPowers.setBoolPref("privacy.trackingprotection.enabled", true);
+
+ // Load the test from a URL that's NOT on the whitelist
+ await testOnWindow(contentPage2).then(function(aWindow) {
+ checkLoads(aWindow, false, true);
+ aWindow.close();
+ });
+
+ // Load the test from a URL on the whitelist
+ await testOnWindow(contentPage1).then(function(aWindow) {
+ checkLoads(aWindow, true, true);
+ aWindow.close();
+ });
+
+ // Load the test from a URL on the whitelist but without the whitelist
+ await SpecialPowers.setCharPref("urlclassifier.trackingWhitelistTable", "");
+ await testOnWindow(contentPage1).then(function(aWindow) {
+ checkLoads(aWindow, false, true);
+ aWindow.close();
+ });
+ await SpecialPowers.clearUserPref("urlclassifier.trackingWhitelistTable");
+ await SpecialPowers.clearUserPref("privacy.trackingprotection.enabled");
+
+ SimpleTest.finish();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+</script>
+
+</pre>
+<iframe id="testFrame" width="100%" height="100%" onload=""></iframe>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/threathit.sjs b/toolkit/components/url-classifier/tests/mochitest/threathit.sjs
new file mode 100644
index 0000000000..a083a2e247
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/threathit.sjs
@@ -0,0 +1,47 @@
+/* Any copyright is dedicated to the Public Domain.
+ * http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const CC = Components.Constructor;
+const BinaryInputStream = CC(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream"
+);
+
+function handleRequest(request, response) {
+ let params = new URLSearchParams(request.queryString);
+ var action = params.get("action");
+
+ var responseBody;
+
+ // Store data in the server side.
+ if (action == "store") {
+ // In the server side we will store:
+ // All the full hashes or update for a given list
+ let state = params.get("list") + params.get("type");
+ let dataStr = params.get("data");
+ setState(state, dataStr);
+ } else if (action == "get") {
+ let state = params.get("list") + params.get("type");
+ responseBody = atob(getState(state));
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(responseBody, responseBody.length);
+ } else if (action == "report") {
+ let state = params.get("list") + "report";
+ let requestUrl =
+ request.scheme +
+ "://" +
+ request.host +
+ ":" +
+ request.port +
+ request.path +
+ "?" +
+ request.queryString;
+ setState(state, requestUrl);
+ } else if (action == "getreport") {
+ let state = params.get("list") + "report";
+ responseBody = getState(state);
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(responseBody);
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/track.html b/toolkit/components/url-classifier/tests/mochitest/track.html
new file mode 100644
index 0000000000..8785e7c5b1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/track.html
@@ -0,0 +1,7 @@
+<html>
+ <head>
+ </head>
+ <body>
+ <h1>Tracking Works!</h1>
+ </body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/trackerFrame.html b/toolkit/components/url-classifier/tests/mochitest/trackerFrame.html
new file mode 100644
index 0000000000..73409b5cda
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/trackerFrame.html
@@ -0,0 +1,91 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+<meta charset="utf-8">
+<title></title>
+</head>
+<body>
+<div id="content" style="display: none">
+
+<img src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=img-src">
+
+<!--nsObjectLoadingContent::OpenChannel-->
+<object data="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=object-data"></object>
+
+<!--ScriptLoader::StartLoad-->
+<script src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=script-src"></script>
+
+<!--nsDocShell::DoURILoad-->
+<iframe src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=iframe-src"></iframe>
+
+<!--Loader::LoadSheet-->
+<link rel="stylesheet" href="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=link-rel-stylesheet" />
+
+<!--nsPrefetchNode::OpenChannel-->
+<!-- Temporarily disable this because it doesn't work in fission when the scheme is https -->
+<!--<link rel="prefetch" href="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=link-rel-prefetch" />-->
+
+<!--HTMLMediaElement::ChannelLoader::LoadInternal-->
+<video src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=video-src">
+</video>
+
+<video src="https://mochi.test:8888/basic.vtt" crossorigin="use-credentials">
+ <track default src="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=track-src" ></track>
+</video>
+
+<!--SendPing-->
+<a ping="https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=ping" id="a-ping" href="#"></a>
+<script>
+ (function() {
+ document.getElementById("a-ping").click();
+ })();
+</script>
+
+<script>
+
+// FetchDriver::HttpFetch
+(function() {
+ try {
+ fetch(new Request("https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=fetch"), {
+ credentials: "include",
+ });
+ } catch (err) {
+ console.log(err);
+ }
+})();
+
+// XMLHttpRequestMainThread::CreateChannel
+(function() {
+ var xhr = new XMLHttpRequest();
+ xhr.open("GET", "https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=xmlhttprequest");
+ xhr.withCredentials = true;
+ xhr.send();
+})();
+
+// Navigator::SendBeaconInternal
+(function() {
+ navigator.sendBeacon("https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=send-beacon");
+})();
+
+</script>
+
+<!-- Fetch inside service worker's script -->
+<iframe id="sw" src="https://example.com/tests/toolkit/components/url-classifier/tests/mochitest/sw_register.html"></iframe>
+<script>
+ let iframe = document.getElementById("sw");
+ window.onmessage = function(e) {
+ if (e.data.status == "registrationdone") {
+ iframe.remove();
+ iframe = document.createElement("iframe");
+ document.getElementById("content").appendChild(iframe);
+ iframe.src = "https://example.com/tests/toolkit/components/url-classifier/tests/mochitest/synth.html?" +
+ "https://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs?id=fetch-in-sw";
+ }
+ };
+</script>
+
+</div>
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs b/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs
new file mode 100644
index 0000000000..be31e666b0
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/trackerFrame.sjs
@@ -0,0 +1,80 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+const stateTotalRequests = "total-request";
+const stateCallback = "callback-response";
+const stateTrackersWithCookie = "trackers-with-cookie";
+const stateTrackersWithoutCookie = "trackers-without-cookie";
+const stateReceivedTrackers = "received-trackers";
+const stateResponseType = "response-tracker-with-cookie";
+
+function reset() {
+ setState(stateCallback, "");
+ setState(stateTrackersWithCookie, "");
+ setState(stateTrackersWithoutCookie, "");
+ setState(stateReceivedTrackers, "");
+ setState(stateResponseType, "");
+}
+
+function handleRequest(aRequest, aResponse) {
+ let params = new URLSearchParams(aRequest.queryString);
+
+  // Init the server and tell it the total number of requests to process;
+ // server set the cookie
+ if (params.has("init")) {
+ setState(stateTotalRequests, params.get("init"));
+
+ aResponse.setStatusLine(aRequest.httpVersion, 200);
+ aResponse.setHeader("Content-Type", "text/plain", false);
+
+ // Prepare the cookie
+ aResponse.setHeader("Set-Cookie", "cookie=1234; SameSite=None; Secure");
+ aResponse.setHeader(
+ "Access-Control-Allow-Origin",
+ aRequest.getHeader("Origin"),
+ false
+ );
+ aResponse.setHeader("Access-Control-Allow-Credentials", "true", false);
+ aResponse.write("begin-test");
+    // Register the callback response; the response will be fired after
+    // receiving all the requests.
+ } else if (params.has("callback")) {
+ aResponse.processAsync();
+ aResponse.setHeader("Content-Type", "text/plain", false);
+ aResponse.setHeader(
+ "Access-Control-Allow-Origin",
+ aRequest.getHeader("Origin"),
+ false
+ );
+ aResponse.setHeader("Access-Control-Allow-Credentials", "true", false);
+
+ setState(stateResponseType, params.get("callback"));
+ setObjectState(stateCallback, aResponse);
+ } else {
+    let count = parseInt(getState(stateReceivedTrackers) || "0", 10) + 1;
+ setState(stateReceivedTrackers, count.toString());
+
+ let state = "";
+ if (aRequest.hasHeader("Cookie")) {
+ state = stateTrackersWithCookie;
+ } else {
+ state = stateTrackersWithoutCookie;
+ }
+
+ let ids = params.get("id").concat(",", getState(state));
+ setState(state, ids);
+
+ if (getState(stateTotalRequests) == getState(stateReceivedTrackers)) {
+ getObjectState(stateCallback, r => {
+ if (getState(stateResponseType) == "with-cookie") {
+ r.write(getState(stateTrackersWithCookie));
+ } else {
+ r.write(getState(stateTrackersWithoutCookie));
+ }
+ r.finish();
+ reset();
+ });
+ }
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/trackingRequest.html b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.html
new file mode 100644
index 0000000000..ea0f92c481
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.html
@@ -0,0 +1,21 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+<title></title>
+
+</head>
+<body>
+
+<!--
+ This domain is not blocklisted for annotations but it is for tracking protection.
+ Therefore if tracking protection is accidentally enabled, this test will fail. On
+ the other hand, tracking.example.com will not be used in any of the same-origin
+ comparisons since we always look for the top window URI when there is one and
+ that's set to be www.itisatrap.org.
+-->
+<script id="badscript" data-touched="not sure" src="http://tracking.example.com/tests/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js"></script>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js
new file mode 100644
index 0000000000..2720578eed
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js
@@ -0,0 +1,9 @@
+window.addEventListener("message", function onMessage(evt) {
+ if (evt.data.type === "doXHR") {
+ var request = new XMLHttpRequest();
+ request.open("GET", evt.data.url, true);
+ request.send(null);
+ } else if (evt.data.type === "doFetch") {
+ fetch(evt.data.url);
+ }
+});
diff --git a/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js^headers^ b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js^headers^
new file mode 100644
index 0000000000..3eced96143
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/trackingRequest.js^headers^
@@ -0,0 +1,2 @@
+Access-Control-Allow-Origin: *
+Cache-Control: no-store
diff --git a/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js b/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js
new file mode 100644
index 0000000000..b4e8a47602
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/unwantedWorker.js
@@ -0,0 +1,5 @@
+/* eslint-env worker */
+
+onmessage = function () {
+ postMessage("loaded bad file");
+};
diff --git a/toolkit/components/url-classifier/tests/mochitest/update.sjs b/toolkit/components/url-classifier/tests/mochitest/update.sjs
new file mode 100644
index 0000000000..f3984b2a6f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/update.sjs
@@ -0,0 +1,71 @@
+const CC = Components.Constructor;
+const BinaryInputStream = CC(
+ "@mozilla.org/binaryinputstream;1",
+ "nsIBinaryInputStream",
+ "setInputStream"
+);
+
+function handleRequest(request, response) {
+ var query = {};
+ request.queryString.split("&").forEach(function (val) {
+ var idx = val.indexOf("=");
+ query[val.slice(0, idx)] = unescape(val.slice(idx + 1));
+ });
+
+ // Store fullhash in the server side.
+ if ("list" in query && "fullhash" in query) {
+ // In the server side we will store:
+ // 1. All the full hashes for a given list
+ // 2. All the lists we have right now
+    // Data entries are separated by '\n'.
+ let list = query.list;
+ let hashes = getState(list);
+
+ let hash = atob(query.fullhash);
+ hashes += hash + "\n";
+ setState(list, hashes);
+
+ let lists = getState("lists");
+ if (!lists.includes(list)) {
+ lists += list + "\n";
+ setState("lists", lists);
+ }
+
+ return;
+ }
+
+ var body = new BinaryInputStream(request.bodyInputStream);
+ var avail;
+ var bytes = [];
+
+ while ((avail = body.available()) > 0) {
+ Array.prototype.push.apply(bytes, body.readByteArray(avail));
+ }
+
+ var responseBody = parseV2Request(bytes);
+
+ response.setHeader("Content-Type", "text/plain", false);
+ response.write(responseBody);
+}
+
+function parseV2Request(bytes) {
+ var table = String.fromCharCode.apply(this, bytes).slice(0, -2);
+
+ var ret = "";
+ getState("lists")
+ .split("\n")
+ .forEach(function (list) {
+ if (list == table) {
+ var completions = getState(list).split("\n");
+ ret += "n:1000\n";
+ ret += "i:" + list + "\n";
+ ret += "a:1:32:" + 32 * (completions.length - 1) + "\n";
+
+ for (var completion of completions) {
+ ret += completion;
+ }
+ }
+ });
+
+ return ret;
+}
diff --git a/toolkit/components/url-classifier/tests/mochitest/vp9.webm b/toolkit/components/url-classifier/tests/mochitest/vp9.webm
new file mode 100644
index 0000000000..221877e303
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/vp9.webm
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html b/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html
new file mode 100644
index 0000000000..620416fc74
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/whitelistFrame.html
@@ -0,0 +1,15 @@
+<!DOCTYPE HTML>
+<!-- Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ -->
+<html>
+<head>
+<title></title>
+</head>
+<body>
+
+<script id="badscript" data-touched="not sure" src="http://trackertest.org/tests/toolkit/components/url-classifier/tests/mochitest/evil.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+<script id="goodscript" data-touched="not sure" src="http://itisatracker.org/tests/toolkit/components/url-classifier/tests/mochitest/good.js" onload="this.dataset.touched = 'yes';" onerror="this.dataset.touched = 'no';"></script>
+
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/mochitest/workerFrame.html b/toolkit/components/url-classifier/tests/mochitest/workerFrame.html
new file mode 100644
index 0000000000..69e8dd0074
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/mochitest/workerFrame.html
@@ -0,0 +1,65 @@
+<html>
+<head>
+<title></title>
+
+<script type="text/javascript">
+
+function startCleanWorker() {
+ var worker = new Worker("cleanWorker.js");
+
+ worker.onmessage = function(event) {
+ if (event.data == "success") {
+ window.parent.postMessage("success:blocked importScripts('evilWorker.js')", "*");
+ } else {
+ window.parent.postMessage("failure:failed to block importScripts('evilWorker.js')", "*");
+ }
+ window.parent.postMessage("finish", "*");
+ };
+
+ worker.onerror = function(event) {
+    window.parent.postMessage("failure:failed to load cleanWorker.js", "*");
+ window.parent.postMessage("finish", "*");
+ };
+
+ worker.postMessage("");
+}
+
+function startEvilWorker() {
+ var worker = new Worker("evilWorker.js");
+
+ worker.onmessage = function(event) {
+ window.parent.postMessage("failure:failed to block evilWorker.js", "*");
+ startUnwantedWorker();
+ };
+
+ worker.onerror = function(event) {
+ window.parent.postMessage("success:blocked evilWorker.js", "*");
+ startUnwantedWorker();
+ };
+
+ worker.postMessage("");
+}
+
+function startUnwantedWorker() {
+ var worker = new Worker("unwantedWorker.js");
+
+ worker.onmessage = function(event) {
+ window.parent.postMessage("failure:failed to block unwantedWorker.js", "*");
+ startCleanWorker();
+ };
+
+ worker.onerror = function(event) {
+ window.parent.postMessage("success:blocked unwantedWorker.js", "*");
+ startCleanWorker();
+ };
+
+ worker.postMessage("");
+}
+
+</script>
+
+</head>
+
+<body onload="startEvilWorker()">
+</body>
+</html>
diff --git a/toolkit/components/url-classifier/tests/moz.build b/toolkit/components/url-classifier/tests/moz.build
new file mode 100644
index 0000000000..79364db097
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/moz.build
@@ -0,0 +1,17 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+MOCHITEST_MANIFESTS += ["mochitest/mochitest.toml"]
+MOCHITEST_CHROME_MANIFESTS += ["mochitest/chrome.toml"]
+XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.toml"]
+BROWSER_CHROME_MANIFESTS += ["browser/browser.toml"]
+
+TESTING_JS_MODULES += [
+ "UrlClassifierTestUtils.sys.mjs",
+]
+
+if CONFIG["ENABLE_TESTS"]:
+ DIRS += ["gtest"]
diff --git a/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256 b/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256
new file mode 100644
index 0000000000..cf95b25ac3
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/content-fingerprinting-track-digest256
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest1.chunk b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
new file mode 100644
index 0000000000..3850373c19
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest1.chunk
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/digest2.chunk b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
new file mode 100644
index 0000000000..738c96f6ba
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/digest2.chunk
@@ -0,0 +1,2 @@
+a:5:32:32
+“Ê_Há^˜aÍ7ÂÙ]´=#ÌnmåÃøún‹æo—ÌQ‰ \ No newline at end of file
diff --git a/toolkit/components/url-classifier/tests/unit/data/google-trackwhite-digest256 b/toolkit/components/url-classifier/tests/unit/data/google-trackwhite-digest256
new file mode 100644
index 0000000000..81d28053ff
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/google-trackwhite-digest256
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/data/invalid.chunk b/toolkit/components/url-classifier/tests/unit/data/invalid.chunk
new file mode 100644
index 0000000000..7911ca4963
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/invalid.chunk
@@ -0,0 +1,2 @@
+a:5:32
+“Ê_Há^˜aÃ7ÂÙ]´=#ÃŒnmåÃøún‹æo—ÌQ‰
diff --git a/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256 b/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256
new file mode 100644
index 0000000000..40f64f3cbf
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/data/mozplugin-block-digest256
Binary files differ
diff --git a/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
new file mode 100644
index 0000000000..0ed731b564
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/head_urlclassifier.js
@@ -0,0 +1,570 @@
+//* -*- indent-tabs-mode: nil; js-indent-level: 2 -*- *
+function dumpn(s) {
+ dump(s + "\n");
+}
+
+const NS_APP_USER_PROFILE_50_DIR = "ProfD";
+const NS_APP_USER_PROFILE_LOCAL_50_DIR = "ProfLD";
+
+var {
+ HTTP_400,
+ HTTP_401,
+ HTTP_402,
+ HTTP_403,
+ HTTP_404,
+ HTTP_405,
+ HTTP_406,
+ HTTP_407,
+ HTTP_408,
+ HTTP_409,
+ HTTP_410,
+ HTTP_411,
+ HTTP_412,
+ HTTP_413,
+ HTTP_414,
+ HTTP_415,
+ HTTP_417,
+ HTTP_500,
+ HTTP_501,
+ HTTP_502,
+ HTTP_503,
+ HTTP_504,
+ HTTP_505,
+ HttpError,
+ HttpServer,
+} = ChromeUtils.importESModule("resource://testing-common/httpd.sys.mjs");
+
+do_get_profile();
+
+// Ensure PSM is initialized before the test
+Cc["@mozilla.org/psm;1"].getService(Ci.nsISupports);
+
+// Disable hashcompleter noise for tests
+Services.prefs.setIntPref("urlclassifier.gethashnoise", 0);
+
+// Enable malware/phishing checking for tests
+Services.prefs.setBoolPref("browser.safebrowsing.malware.enabled", true);
+Services.prefs.setBoolPref("browser.safebrowsing.blockedURIs.enabled", true);
+Services.prefs.setBoolPref("browser.safebrowsing.phishing.enabled", true);
+Services.prefs.setBoolPref(
+ "browser.safebrowsing.provider.test.disableBackoff",
+ true
+);
+
+// Add testing tables, we don't use moztest-* here because it doesn't support update
+Services.prefs.setCharPref("urlclassifier.phishTable", "test-phish-simple");
+Services.prefs.setCharPref(
+ "urlclassifier.malwareTable",
+ "test-harmful-simple,test-malware-simple,test-unwanted-simple"
+);
+Services.prefs.setCharPref("urlclassifier.blockedTable", "test-block-simple");
+Services.prefs.setCharPref("urlclassifier.trackingTable", "test-track-simple");
+Services.prefs.setCharPref(
+ "urlclassifier.trackingWhitelistTable",
+ "test-trackwhite-simple"
+);
+
+// Enable all completions for tests
+Services.prefs.setCharPref("urlclassifier.disallow_completions", "");
+
+// Hash completion timeout
+Services.prefs.setIntPref("urlclassifier.gethash.timeout_ms", 5000);
+
+function delFile(name) {
+ try {
+ // Delete a previously created sqlite file
+ var file = Services.dirsvc.get("ProfLD", Ci.nsIFile);
+ file.append(name);
+ if (file.exists()) {
+ file.remove(false);
+ }
+ } catch (e) {}
+}
+
+function cleanUp() {
+ delFile("urlclassifier3.sqlite");
+ delFile("safebrowsing/classifier.hashkey");
+ delFile("safebrowsing/test-phish-simple.sbstore");
+ delFile("safebrowsing/test-malware-simple.sbstore");
+ delFile("safebrowsing/test-unwanted-simple.sbstore");
+ delFile("safebrowsing/test-block-simple.sbstore");
+ delFile("safebrowsing/test-harmful-simple.sbstore");
+ delFile("safebrowsing/test-track-simple.sbstore");
+ delFile("safebrowsing/test-trackwhite-simple.sbstore");
+ delFile("safebrowsing/test-phish-simple.pset");
+ delFile("safebrowsing/test-malware-simple.pset");
+ delFile("safebrowsing/test-unwanted-simple.pset");
+ delFile("safebrowsing/test-block-simple.pset");
+ delFile("safebrowsing/test-harmful-simple.pset");
+ delFile("safebrowsing/test-track-simple.pset");
+ delFile("safebrowsing/test-trackwhite-simple.pset");
+ delFile("safebrowsing/moz-phish-simple.sbstore");
+ delFile("safebrowsing/moz-phish-simple.pset");
+ delFile("testLarge.pset");
+ delFile("testNoDelta.pset");
+}
+
+// Update uses allTables by default
+var allTables =
+ "test-phish-simple,test-malware-simple,test-unwanted-simple,test-track-simple,test-trackwhite-simple,test-block-simple";
+var mozTables = "moz-phish-simple";
+
+var dbservice = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+var streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+/*
+ * Builds an update from an object that looks like:
+ *{ "test-phish-simple" : [{
+ * "chunkType" : "a", // 'a' is assumed if not specified
+ * "chunkNum" : 1, // numerically-increasing chunk numbers are assumed
+ * // if not specified
+ * "urls" : [ "foo.com/a", "foo.com/b", "bar.com/" ]
+ * }] }
+ */
+
+function buildUpdate(update, hashSize) {
+ if (!hashSize) {
+ hashSize = 32;
+ }
+ var updateStr = "n:1000\n";
+
+ for (var tableName in update) {
+ if (tableName != "") {
+ updateStr += "i:" + tableName + "\n";
+ }
+ var chunks = update[tableName];
+ for (var j = 0; j < chunks.length; j++) {
+ var chunk = chunks[j];
+ var chunkType = chunk.chunkType ? chunk.chunkType : "a";
+ var chunkNum = chunk.chunkNum ? chunk.chunkNum : j;
+ updateStr += chunkType + ":" + chunkNum + ":" + hashSize;
+
+ if (chunk.urls) {
+ var chunkData = chunk.urls.join("\n");
+ updateStr += ":" + chunkData.length + "\n" + chunkData;
+ }
+
+ updateStr += "\n";
+ }
+ }
+
+ return updateStr;
+}
+
+function buildPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-phish-simple": chunks }, hashSize);
+}
+
+function buildMalwareUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-malware-simple": chunks }, hashSize);
+}
+
+function buildUnwantedUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-unwanted-simple": chunks }, hashSize);
+}
+
+function buildBlockedUpdate(chunks, hashSize) {
+ return buildUpdate({ "test-block-simple": chunks }, hashSize);
+}
+
+function buildMozPhishingUpdate(chunks, hashSize) {
+ return buildUpdate({ "moz-phish-simple": chunks }, hashSize);
+}
+
+function buildBareUpdate(chunks, hashSize) {
+ return buildUpdate({ "": chunks }, hashSize);
+}
+
+/**
+ * Performs an update of the dbservice manually, bypassing the stream updater
+ */
+function doSimpleUpdate(updateText, success, failure) {
+ var listener = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierUpdateObserver"]),
+
+ updateUrlRequested(url) {},
+ streamFinished(status) {},
+ updateError(errorCode) {
+ failure(errorCode);
+ },
+ updateSuccess(requestedTimeout) {
+ success(requestedTimeout);
+ },
+ };
+
+ dbservice.beginUpdate(listener, allTables);
+ dbservice.beginStream("", "");
+ dbservice.updateStream(updateText);
+ dbservice.finishStream();
+ dbservice.finishUpdate();
+}
+
+/**
+ * Simulates a failed database update.
+ */
+function doErrorUpdate(tables, success, failure) {
+ var listener = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierUpdateObserver"]),
+
+ updateUrlRequested(url) {},
+ streamFinished(status) {},
+ updateError(errorCode) {
+ success(errorCode);
+ },
+ updateSuccess(requestedTimeout) {
+ failure(requestedTimeout);
+ },
+ };
+
+ dbservice.beginUpdate(listener, tables, null);
+ dbservice.beginStream("", "");
+ dbservice.cancelUpdate();
+}
+
+/**
+ * Performs an update of the dbservice using the stream updater and a
+ * data: uri
+ */
+function doStreamUpdate(updateText, success, failure, downloadFailure) {
+ var dataUpdate = "data:," + encodeURIComponent(updateText);
+
+ if (!downloadFailure) {
+ downloadFailure = failure;
+ }
+
+ streamUpdater.downloadUpdates(
+ allTables,
+ "",
+ true,
+ dataUpdate,
+ success,
+ failure,
+ downloadFailure
+ );
+}
+
+var gAssertions = {
+ tableData(expectedTables, cb) {
+ dbservice.getTables(function (tables) {
+ // rebuild the tables in a predictable order.
+ var parts = tables.split("\n");
+ while (parts[parts.length - 1] == "") {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join("\n");
+
+ Assert.equal(tables, expectedTables);
+ cb();
+ });
+ },
+
+ checkUrls(urls, expected, cb, useMoz = false) {
+ // work with a copy of the list.
+ urls = urls.slice(0);
+ var doLookup = function () {
+ if (urls.length) {
+ var tables = useMoz ? mozTables : allTables;
+ var fragment = urls.shift();
+ var principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + fragment),
+ {}
+ );
+ dbservice.lookup(
+ principal,
+ tables,
+ function (arg) {
+ Assert.equal(expected, arg);
+ doLookup();
+ },
+ true
+ );
+ } else {
+ cb();
+ }
+ };
+ doLookup();
+ },
+
+ checkTables(url, expected, cb) {
+ var principal = Services.scriptSecurityManager.createContentPrincipal(
+ Services.io.newURI("http://" + url),
+ {}
+ );
+ dbservice.lookup(
+ principal,
+ allTables,
+ function (tables) {
+ // Rebuild tables in a predictable order.
+ var parts = tables.split(",");
+ while (parts[parts.length - 1] == "") {
+ parts.pop();
+ }
+ parts.sort();
+ tables = parts.join(",");
+ Assert.equal(tables, expected);
+ cb();
+ },
+ true
+ );
+ },
+
+ urlsDontExist(urls, cb) {
+ this.checkUrls(urls, "", cb);
+ },
+
+ urlsExist(urls, cb) {
+ this.checkUrls(urls, "test-phish-simple", cb);
+ },
+
+ malwareUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-malware-simple", cb);
+ },
+
+ unwantedUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-unwanted-simple", cb);
+ },
+
+ blockedUrlsExist(urls, cb) {
+ this.checkUrls(urls, "test-block-simple", cb);
+ },
+
+ mozPhishingUrlsExist(urls, cb) {
+ this.checkUrls(urls, "moz-phish-simple", cb, true);
+ },
+
+ subsDontExist(urls, cb) {
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+ },
+
+ subsExist(urls, cb) {
+ // XXX: there's no interface for checking items in the subs table
+ cb();
+ },
+
+ urlExistInMultipleTables(data, cb) {
+ this.checkTables(data.url, data.tables, cb);
+ },
+};
+
+/**
+ * Check a set of assertions against the gAssertions table.
+ */
+function checkAssertions(assertions, doneCallback) {
+ var checkAssertion = function () {
+ for (var i in assertions) {
+ var data = assertions[i];
+ delete assertions[i];
+ gAssertions[i](data, checkAssertion);
+ return;
+ }
+
+ doneCallback();
+ };
+
+ checkAssertion();
+}
+
+function updateError(arg) {
+ do_throw(arg);
+}
+
+/**
+ * Utility functions
+ */
+ChromeUtils.defineESModuleGetters(this, {
+ NetUtil: "resource://gre/modules/NetUtil.sys.mjs",
+});
+
+function readFileToString(aFilename) {
+ let f = do_get_file(aFilename);
+ let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(
+ Ci.nsIFileInputStream
+ );
+ stream.init(f, -1, 0, 0);
+ let buf = NetUtil.readInputStreamToString(stream, stream.available());
+ return buf;
+}
+
+// Runs a set of updates, and then checks a set of assertions.
+function doUpdateTest(updates, assertions, successCallback, errorCallback) {
+ var errorUpdate = function () {
+ checkAssertions(assertions, errorCallback);
+ };
+
+ var runUpdate = function () {
+ if (updates.length) {
+ var update = updates.shift();
+ doStreamUpdate(update, runUpdate, errorUpdate, null);
+ } else {
+ checkAssertions(assertions, successCallback);
+ }
+ };
+
+ runUpdate();
+}
+
+var gTests;
+var gNextTest = 0;
+
+function runNextTest() {
+ if (gNextTest >= gTests.length) {
+ do_test_finished();
+ return;
+ }
+
+ dbservice.resetDatabase();
+ dbservice.setHashCompleter("test-phish-simple", null);
+
+ let test = gTests[gNextTest++];
+ dump("running " + test.name + "\n");
+ test();
+}
+
+function runTests(tests) {
+ gTests = tests;
+ runNextTest();
+}
+
+var timerArray = [];
+
+function Timer(delay, cb) {
+ this.cb = cb;
+ var timer = Cc["@mozilla.org/timer;1"].createInstance(Ci.nsITimer);
+ timer.initWithCallback(this, delay, timer.TYPE_ONE_SHOT);
+ timerArray.push(timer);
+}
+
+Timer.prototype = {
+ QueryInterface: ChromeUtils.generateQI(["nsITimerCallback"]),
+ notify(timer) {
+ this.cb();
+ },
+};
+
+// LFSRgenerator is a 32-bit linear feedback shift register random number
+// generator. It is highly predictable and is not intended to be used for
+// cryptography but rather to allow easier debugging than a test that uses
+// Math.random().
+function LFSRgenerator(seed) {
+ // Force |seed| to be a number.
+ seed = +seed;
+ // LFSR generators do not work with a value of 0.
+ if (seed == 0) {
+ seed = 1;
+ }
+
+ this._value = seed;
+}
+LFSRgenerator.prototype = {
+  // nextNum returns a random unsigned integer in the range [0, 2^|bits|).
+ nextNum(bits) {
+ if (!bits) {
+ bits = 32;
+ }
+
+ let val = this._value;
+ // Taps are 32, 22, 2 and 1.
+ let bit = ((val >>> 0) ^ (val >>> 10) ^ (val >>> 30) ^ (val >>> 31)) & 1;
+ val = (val >>> 1) | (bit << 31);
+ this._value = val;
+
+ return val >>> (32 - bits);
+ },
+};
+
+function waitUntilMetaDataSaved(expectedState, expectedChecksum, callback) {
+ let dbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+ );
+
+ dbService.getTables(metaData => {
+ info("metadata: " + metaData);
+ let didCallback = false;
+ metaData.split("\n").some(line => {
+ // Parse [tableName];[stateBase64]
+ let p = line.indexOf(";");
+ if (-1 === p) {
+ return false; // continue.
+ }
+ let tableName = line.substring(0, p);
+ let metadata = line.substring(p + 1).split(":");
+ let stateBase64 = metadata[0];
+ let checksumBase64 = metadata[1];
+
+ if (tableName !== "test-phish-proto") {
+ return false; // continue.
+ }
+
+ if (
+ stateBase64 === btoa(expectedState) &&
+ checksumBase64 === btoa(expectedChecksum)
+ ) {
+ info("State has been saved to disk!");
+
+ // We slightly defer the callback to see if the in-memory
+ // |getTables| caching works correctly.
+ dbService.getTables(cachedMetadata => {
+ equal(cachedMetadata, metaData);
+ callback();
+ });
+
+          // Even though we haven't invoked the callback at this moment,
+          // we still claim "we have" in order to stop scheduling
+          // a new timer.
+ didCallback = true;
+ }
+
+ return true; // break no matter whether the state is matching.
+ });
+
+ if (!didCallback) {
+ do_timeout(
+ 1000,
+ waitUntilMetaDataSaved.bind(
+ null,
+ expectedState,
+ expectedChecksum,
+ callback
+ )
+ );
+ }
+ });
+}
+
+var gUpdateFinishedObserverEnabled = false;
+var gUpdateFinishedObserver = function (aSubject, aTopic, aData) {
+ info("[" + aTopic + "] " + aData);
+ if (aData != "success") {
+ updateError(aData);
+ }
+};
+
+function throwOnUpdateErrors() {
+ Services.obs.addObserver(
+ gUpdateFinishedObserver,
+ "safebrowsing-update-finished"
+ );
+ gUpdateFinishedObserverEnabled = true;
+}
+
+function stopThrowingOnUpdateErrors() {
+ if (gUpdateFinishedObserverEnabled) {
+ Services.obs.removeObserver(
+ gUpdateFinishedObserver,
+ "safebrowsing-update-finished"
+ );
+ gUpdateFinishedObserverEnabled = false;
+ }
+}
+
+cleanUp();
+
+registerCleanupFunction(function () {
+ cleanUp();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_addsub.js b/toolkit/components/url-classifier/tests/unit/test_addsub.js
new file mode 100644
index 0000000000..f58a02506f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_addsub.js
@@ -0,0 +1,329 @@
// Run |updates| through the shared update-test driver with |assertions|;
// advance to the next test on success and treat update failures as errors.
function doTest(updates, assertions) {
  doUpdateTest(updates, assertions, runNextTest, updateError);
}
+
// Add three URLs (two sharing a domain) in one chunk to a fresh database
// and verify they can all be looked up afterwards.
function testSimpleAdds() {
  const urls = ["foo.com/a", "foo.com/b", "bar.com/c"];
  const update = buildPhishingUpdate([{ chunkNum: 1, urls }]);

  doTest([update], {
    tableData: "test-phish-simple;a:1",
    urlsExist: urls,
  });
}
+
// Like testSimpleAdds, except the same-domain URLs arrive in two different
// add chunks of the same update.
function testMultipleAdds() {
  const firstChunkUrls = ["foo.com/a", "bar.com/c"];
  const secondChunkUrls = ["foo.com/b"];

  const update = buildPhishingUpdate([
    { chunkNum: 1, urls: firstChunkUrls },
    { chunkNum: 2, urls: secondChunkUrls },
  ]);

  doTest([update], {
    tableData: "test-phish-simple;a:1-2",
    urlsExist: [...firstChunkUrls, ...secondChunkUrls],
  });
}
+
// Test that a sub will remove an existing add
function testSimpleSub() {
  var addUrls = ["foo.com/a", "bar.com/b"];
  // Sub entries carry the add-chunk number they target: "<addChunk>:<url>".
  var subUrls = ["1:foo.com/a"];

  var addUpdate = buildPhishingUpdate([
    {
      chunkNum: 1, // adds and subtracts don't share a chunk numbering space
      urls: addUrls,
    },
  ]);

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 50, chunkType: "s", urls: subUrls },
  ]);

  var assertions = {
    tableData: "test-phish-simple;a:1:s:50",
    urlsExist: ["bar.com/b"],
    urlsDontExist: ["foo.com/a"],
    // The sub matched an add, so no pending sub entry should remain.
    subsDontExist: ["foo.com/a"],
  };

  doTest([addUpdate, subUpdate], assertions);
}
+
// Same scenario as testSimpleSub(), but the sub chunk arrives before the
// add chunk, so the sub has to be held as pending until the add shows up.
function testSubEmptiesAdd() {
  const subUpdate = buildPhishingUpdate([
    { chunkNum: 50, chunkType: "s", urls: ["1:foo.com/a"] },
  ]);
  const addUpdate = buildPhishingUpdate([
    { chunkNum: 1, urls: ["foo.com/a", "bar.com/b"] },
  ]);

  const assertions = {
    tableData: "test-phish-simple;a:1:s:50",
    urlsExist: ["bar.com/b"],
    urlsDontExist: ["foo.com/a"],
    subsDontExist: ["foo.com/a"], // this sub was found, it shouldn't exist anymore
  };

  doTest([subUpdate, addUpdate], assertions);
}
+
// Very similar to testSubEmptiesAdd, except that the domain entry will
// still have an item left over that needs to be synced.
function testSubPartiallyEmptiesAdd() {
  var subUrls = ["1:foo.com/a"];
  // foo.com/b shares the subbed domain but must survive the sub.
  var addUrls = ["foo.com/a", "foo.com/b", "bar.com/b"];

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: subUrls },
  ]);

  var addUpdate = buildPhishingUpdate([
    {
      chunkNum: 1, // adds and subtracts don't share a chunk numbering space
      urls: addUrls,
    },
  ]);

  var assertions = {
    tableData: "test-phish-simple;a:1:s:1",
    urlsExist: ["foo.com/b", "bar.com/b"],
    urlsDontExist: ["foo.com/a"],
    subsDontExist: ["foo.com/a"], // this sub was found, it shouldn't exist anymore
  };

  doTest([subUpdate, addUpdate], assertions);
}
+
// We SHOULD be testing that pending subs are removed using
// subsDontExist assertions. Since we don't have a good interface for getting
// at sub entries, we'll verify it by side-effect. Subbing a url once
// then adding it twice should leave the url intact.
function testPendingSubRemoved() {
  // Each sub names the add chunk it applies to (chunk 1 and chunk 2).
  var subUrls = ["1:foo.com/a", "2:foo.com/b"];
  var addUrls = ["foo.com/a", "foo.com/b"];

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: subUrls },
  ]);

  var addUpdate1 = buildPhishingUpdate([
    {
      chunkNum: 1, // adds and subtracts don't share a chunk numbering space
      urls: addUrls,
    },
  ]);

  // The second add re-introduces both urls after the subs were consumed.
  var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls }]);

  var assertions = {
    tableData: "test-phish-simple;a:1-2:s:1",
    urlsExist: ["foo.com/a", "foo.com/b"],
    subsDontExist: ["foo.com/a", "foo.com/b"], // this sub was found, it shouldn't exist anymore
  };

  doTest([subUpdate, addUpdate1, addUpdate2], assertions);
}
+
// Make sure that a saved sub is removed when the sub chunk is expired.
function testPendingSubExpire() {
  var subUrls = ["1:foo.com/a", "1:foo.com/b"];
  var addUrls = ["foo.com/a", "foo.com/b"];

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: subUrls },
  ]);

  // "sd" expires (deletes) the sub chunk before its adds ever arrive.
  var expireUpdate = buildPhishingUpdate([{ chunkNum: 1, chunkType: "sd" }]);

  var addUpdate = buildPhishingUpdate([
    {
      chunkNum: 1, // adds and subtracts don't share a chunk numbering space
      urls: addUrls,
    },
  ]);

  var assertions = {
    tableData: "test-phish-simple;a:1",
    urlsExist: ["foo.com/a", "foo.com/b"],
    subsDontExist: ["foo.com/a", "foo.com/b"], // this sub was expired
  };

  doTest([subUpdate, expireUpdate, addUpdate], assertions);
}
+
// Make sure that a sub url removes the url only from the add chunk that it
// names: the same url added in chunks 1 and 2, subbed only from chunk 2,
// must still exist (via chunk 1).
function testDuplicateAdds() {
  const urls = ["foo.com/a"];

  const updates = [
    buildPhishingUpdate([{ chunkNum: 1, urls }]),
    buildPhishingUpdate([{ chunkNum: 2, urls }]),
    buildPhishingUpdate([
      { chunkNum: 3, chunkType: "s", urls: ["2:foo.com/a"] },
    ]),
  ];

  doTest(updates, {
    tableData: "test-phish-simple;a:1-2:s:3",
    urlsExist: ["foo.com/a"],
    subsDontExist: ["foo.com/a"],
  });
}
+
// Tests a sub which matches some existing adds but leaves others.
function testSubPartiallyMatches() {
  // NOTE(review): these add urls keep the "chunk:url" prefix form and the
  // sub chunk reuses the very same strings — presumably intentional for
  // this partial-match scenario; verify against buildPhishingUpdate.
  var addUrls = ["1:foo.com/a", "2:foo.com/b"];

  var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: addUrls },
  ]);

  var assertions = {
    tableData: "test-phish-simple;a:1:s:1",
    urlsDontExist: ["foo.com/a"],
    subsDontExist: ["foo.com/a"],
    // The sub for foo.com/b found no matching add, so it stays pending.
    subsExist: ["foo.com/b"],
  };

  doTest([addUpdate, subUpdate], assertions);
}
+
// XXX: because subsExist isn't actually implemented, this is the same
// test as above but with a second add chunk that should fail to be added
// because of a pending sub chunk.
function testSubPartiallyMatches2() {
  var addUrls = ["foo.com/a"];
  // The "2:foo.com/b" sub has no matching add yet; it must stay pending
  // and later consume addUrls2's chunk-2 add.
  var subUrls = ["1:foo.com/a", "2:foo.com/b"];
  var addUrls2 = ["foo.com/b"];

  var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);

  var subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: subUrls },
  ]);

  var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }]);

  var assertions = {
    tableData: "test-phish-simple;a:1-2:s:1",
    urlsDontExist: ["foo.com/a", "foo.com/b"],
    subsDontExist: ["foo.com/a", "foo.com/b"],
  };

  doTest([addUpdate, subUpdate, addUpdate2], assertions);
}
+
// Verify that two subs for the same domain but from different chunks
// match (tests that existing sub entries are properly updated)
function testSubsDifferentChunks() {
  const subUpdate1 = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: ["3:foo.com/a"] },
  ]);
  const subUpdate2 = buildPhishingUpdate([
    { chunkNum: 2, chunkType: "s", urls: ["3:foo.com/b"] },
  ]);
  const addUpdate = buildPhishingUpdate([
    { chunkNum: 3, urls: ["foo.com/a", "foo.com/b", "foo.com/c"] },
  ]);

  doTest([subUpdate1, subUpdate2, addUpdate], {
    tableData: "test-phish-simple;a:3:s:1-2",
    urlsExist: ["foo.com/c"],
    urlsDontExist: ["foo.com/a", "foo.com/b"],
    subsDontExist: ["foo.com/a", "foo.com/b"],
  });
}
+
// for bug 534079
// Subs from two different sub chunks target adds that share a host id;
// every url must end up removed.
function testSubsDifferentChunksSameHostId() {
  var subUrls1 = ["1:foo.com/a"];
  var subUrls2 = ["1:foo.com/b", "2:foo.com/c"];

  var addUrls = ["foo.com/a", "foo.com/b"];
  var addUrls2 = ["foo.com/c"];

  var subUpdate1 = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: subUrls1 },
  ]);
  var subUpdate2 = buildPhishingUpdate([
    { chunkNum: 2, chunkType: "s", urls: subUrls2 },
  ]);

  var addUpdate = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }]);
  var addUpdate2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }]);

  var assertions = {
    tableData: "test-phish-simple;a:1-2:s:1-2",
    urlsDontExist: ["foo.com/c", "foo.com/b", "foo.com/a"],
  };

  doTest([addUpdate, addUpdate2, subUpdate1, subUpdate2], assertions);
}
+
// Test lists of expired chunks
function testExpireLists() {
  const addUpdate = buildPhishingUpdate([
    { chunkNum: 1, urls: ["foo.com/a"] },
    { chunkNum: 3, urls: ["bar.com/a"] },
    { chunkNum: 4, urls: ["baz.com/a"] },
    { chunkNum: 5, urls: ["blah.com/a"] },
  ]);
  const subUpdate = buildPhishingUpdate([
    { chunkNum: 1, chunkType: "s", urls: ["50:foo.com/1"] },
    { chunkNum: 2, chunkType: "s", urls: ["50:bar.com/1"] },
    { chunkNum: 3, chunkType: "s", urls: ["50:baz.com/1"] },
    { chunkNum: 5, chunkType: "s", urls: ["50:blah.com/1"] },
  ]);

  // "ad:"/"sd:" accept comma-separated chunk lists and ranges, expiring
  // every add and sub chunk created above in one update.
  const expireUpdate = buildPhishingUpdate([
    { chunkType: "ad:1,3-5" },
    { chunkType: "sd:1-3,5" },
  ]);

  // With everything expired, the table reports no chunk data at all.
  doTest([addUpdate, subUpdate, expireUpdate], { tableData: "" });
}
+
// Entry point: run every add/sub scenario in sequence.
function run_test() {
  const tests = [
    testSimpleAdds,
    testMultipleAdds,
    testSimpleSub,
    testSubEmptiesAdd,
    testSubPartiallyEmptiesAdd,
    testPendingSubRemoved,
    testPendingSubExpire,
    testDuplicateAdds,
    testSubPartiallyMatches,
    testSubPartiallyMatches2,
    testSubsDifferentChunks,
    testSubsDifferentChunksSameHostId,
    testExpireLists,
  ];
  runTests(tests);
}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_backoff.js b/toolkit/components/url-classifier/tests/unit/test_backoff.js
new file mode 100644
index 0000000000..e78a3ee23c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_backoff.js
@@ -0,0 +1,92 @@
// Some unittests (e.g., paste into JS shell)
var jslib =
  Cc["@mozilla.org/url-classifier/jslib;1"].getService().wrappedJSObject;

// Date constructor of the global that jslib lives in, so the stub below is
// the Date.now() the RequestBackoff implementation itself sees.
var jslibDate = Cu.getGlobalForObject(jslib).Date;

// The real Date.now, restored at the end of run_test().
var _Datenow = jslibDate.now;
// Freeze the clock seen by RequestBackoff at |time| (milliseconds).
function setNow(time) {
  jslibDate.now = function () {
    return time;
  };
}
+
// Exercises jslib.RequestBackoff end to end with a mocked clock (setNow):
// errors below the threshold use the fixed 1ms retry period, the threshold
// error starts 5ms backoff doubling up to the 19ms cap, a success resets
// the state, and at most 3 requests may be made per 10ms window.
function run_test() {
  // 3 errors, 1ms retry period, max 3 requests per ten milliseconds,
  // 5ms backoff interval, 19ms max delay
  var rb = new jslib.RequestBackoff(3, 1, 3, 10, 5, 19, 0);

  // Record an error response, verify requests are blocked until
  // |expectedNextTime|, then advance the clock past the delay.
  function noteErrorAndWait(status, expectedNextTime) {
    rb.noteServerResponse(status);
    Assert.ok(!rb.canMakeRequest());
    Assert.equal(rb.nextRequestTime_, expectedNextTime);
    setNow(expectedNextTime);
    Assert.ok(rb.canMakeRequest());
  }

  setNow(1);
  rb.noteServerResponse(200);
  Assert.ok(rb.canMakeRequest());
  setNow(2);
  Assert.ok(rb.canMakeRequest());

  // First error should trigger a 1ms delay
  noteErrorAndWait(500, 3);

  // Second error should also trigger a 1ms delay
  noteErrorAndWait(500, 4);

  // Third error should trigger a 5ms backoff
  noteErrorAndWait(500, 9);

  // Trigger backoff again
  noteErrorAndWait(503, 19);

  // Trigger backoff a third time and hit max timeout
  noteErrorAndWait(302, 38);

  // One more backoff, should still be at the max timeout
  noteErrorAndWait(400, 57);

  // Request goes through
  rb.noteServerResponse(200);
  Assert.ok(rb.canMakeRequest());
  Assert.equal(rb.nextRequestTime_, 0);
  setNow(58);

  // Another error, should trigger a 1ms backoff
  noteErrorAndWait(500, 59);

  // Rate limiting: the third request inside the 10ms window is the last
  // one allowed; the fourth must wait until the window has passed.
  setNow(200);
  rb.noteRequest();
  setNow(201);
  rb.noteRequest();
  setNow(202);
  Assert.ok(rb.canMakeRequest());
  rb.noteRequest();
  Assert.ok(!rb.canMakeRequest());
  setNow(211);
  Assert.ok(rb.canMakeRequest());

  // Restore the real clock.
  jslibDate.now = _Datenow;
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
new file mode 100644
index 0000000000..4668a901eb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_bug1274685_unowned_list.js
@@ -0,0 +1,65 @@
+const { SafeBrowsing } = ChromeUtils.importESModule(
+ "resource://gre/modules/SafeBrowsing.sys.mjs"
+);
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+
add_setup(async () => {
  // 'Cc["@mozilla.org/xre/app-info;1"]' for xpcshell has no nsIXULAppInfo
  // so that we have to update it to make nsURLFormatter.js happy.
  // (SafeBrowsing.init() will indirectly use nsURLFormatter.js)
  updateAppInfo();

  // This test should not actually try to create a connection to any real
  // endpoint. But a background request could try that while the test is in
  // progress before we've actually shut down networking, and would cause a
  // crash due to connecting to a non-local IP.
  Services.prefs.setCharPref(
    "browser.safebrowsing.provider.mozilla.updateURL",
    `http://localhost:4444/safebrowsing/update`
  );
  // Restore every pref this file touches (the lists prefs are modified in
  // the test task below).
  registerCleanupFunction(() => {
    Services.prefs.clearUserPref(
      "browser.safebrowsing.provider.mozilla.updateURL"
    );
    Services.prefs.clearUserPref("browser.safebrowsing.provider.google.lists");
    Services.prefs.clearUserPref("browser.safebrowsing.provider.google4.lists");
  });
});
+
// Regression test for bug 1274685: registering a table whose provider was
// removed from the lists prefs must not throw.
add_task(async function test() {
  SafeBrowsing.init();

  let origListV2 = Services.prefs.getCharPref(
    "browser.safebrowsing.provider.google.lists"
  );
  let origListV4 = Services.prefs.getCharPref(
    "browser.safebrowsing.provider.google4.lists"
  );

  // Ensure there's a list missing in both Safe Browsing V2 and V4.
  let trimmedListV2 = origListV2.replace("goog-malware-shavar,", "");
  Services.prefs.setCharPref(
    "browser.safebrowsing.provider.google.lists",
    trimmedListV2
  );
  let trimmedListV4 = origListV4.replace("goog-malware-proto,", "");
  Services.prefs.setCharPref(
    "browser.safebrowsing.provider.google4.lists",
    trimmedListV4
  );

  try {
    // Bug 1274685 - Unowned Safe Browsing tables break list updates
    //
    // If SafeBrowsing.registerTableWithURLs() doesn't check if
    // a provider is found before registering table, an exception
    // will be thrown while accessing a null object.
    //
    SafeBrowsing.registerTables();
    ok(true, "SafeBrowsing.registerTables() did not throw.");
  } catch (e) {
    ok(false, "Exception thrown due to " + e.toString());
  }
});
diff --git a/toolkit/components/url-classifier/tests/unit/test_canonicalization.js b/toolkit/components/url-classifier/tests/unit/test_canonicalization.js
new file mode 100644
index 0000000000..e26bb5d84a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_canonicalization.js
@@ -0,0 +1,83 @@
+/* Any copyright is dedicated to the Public Domain.
+http://creativecommons.org/publicdomain/zero/1.0/ */
+
+"use strict";
+
/**
 * Canonicalize |url| the way the url-classifier does: the URI's scheme plus
 * the lookup key produced by nsIUrlClassifierUtils.getKeyForURI().
 */
function canonicalize(url) {
  const classifierUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
    Ci.nsIUrlClassifierUtils
  );
  const uri = Services.io.newURI(url);
  return `${uri.scheme}://${classifierUtils.getKeyForURI(uri)}`;
}
+
// Verify canonicalize() against the reference testcases from
// https://developers.google.com/safe-browsing/v4/urls-hashing
function run_test() {
  // [input, expected canonical form]
  const cases = [
    ["http://host/%25%32%35", "http://host/%25"],
    ["http://host/%25%32%35%25%32%35", "http://host/%25%25"],
    ["http://host/%2525252525252525", "http://host/%25"],
    ["http://host/asdf%25%32%35asd", "http://host/asdf%25asd"],
    ["http://host/%%%25%32%35asd%%", "http://host/%25%25%25asd%25%25"],
    ["http://www.google.com/", "http://www.google.com/"],
    [
      "http://%31%36%38%2e%31%38%38%2e%39%39%2e%32%36/%2E%73%65%63%75%72%65/%77%77%77%2E%65%62%61%79%2E%63%6F%6D/",
      "http://168.188.99.26/.secure/www.ebay.com/",
    ],
    [
      "http://195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/",
      "http://195.127.0.11/uploads/%20%20%20%20/.verify/.eBaysecure=updateuserdataxplimnbqmn-xplmvalidateinfoswqpcmlx=hgplmcx/",
    ],
    ["http://3279880203/blah", "http://195.127.0.11/blah"],
    ["http://www.google.com/blah/..", "http://www.google.com/"],
    ["http://www.evil.com/blah#frag", "http://www.evil.com/blah"],
    ["http://www.GOOgle.com/", "http://www.google.com/"],
    ["http://www.google.com.../", "http://www.google.com/"],
    [
      "http://www.google.com/foo\tbar\rbaz\n2",
      "http://www.google.com/foobarbaz2",
    ],
    ["http://www.google.com/q?", "http://www.google.com/q?"],
    ["http://www.google.com/q?r?", "http://www.google.com/q?r?"],
    ["http://www.google.com/q?r?s", "http://www.google.com/q?r?s"],
    ["http://evil.com/foo#bar#baz", "http://evil.com/foo"],
    ["http://evil.com/foo;", "http://evil.com/foo;"],
    ["http://evil.com/foo?bar;", "http://evil.com/foo?bar;"],
    ["http://notrailingslash.com", "http://notrailingslash.com/"],
    ["http://www.gotaport.com:1234/", "http://www.gotaport.com/"],
    ["https://www.securesite.com/", "https://www.securesite.com/"],
    ["http://host.com/ab%23cd", "http://host.com/ab%23cd"],
    [
      "http://host.com//twoslashes?more//slashes",
      "http://host.com/twoslashes?more//slashes",
    ],
  ];

  for (const [input, expected] of cases) {
    equal(canonicalize(input), expected);
  }
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js b/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js
new file mode 100644
index 0000000000..9d5f5edb65
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_channelClassifierService.js
@@ -0,0 +1,225 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIUrlClassifierSkipListService implementation. */
+
// Shared mock HTTP server; started lazily by setupChannel() and stopped by
// each test task during its cleanup.
var httpserver = new HttpServer();

const { NetUtil } = ChromeUtils.importESModule(
  "resource://gre/modules/NetUtil.sys.mjs"
);
const { UrlClassifierTestUtils } = ChromeUtils.importESModule(
  "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
);

// Pref gating the social-tracking-protection classifier feature.
const FEATURE_STP_PREF = "privacy.trackingprotection.socialtracking.enabled";
const TOP_LEVEL_DOMAIN = "http://www.example.com/";
// Domain that UrlClassifierTestUtils registers as a social tracker.
const TRACKER_DOMAIN = "http://social-tracking.example.org/";
+
/**
 * Build a TYPE_OTHER channel to |uri| (port-adjusted to the local mock
 * server) whose loading principal and top-window URI come from |topUri|,
 * so the url-classifier sees it as a third-party subresource load.
 */
function setupChannel(uri, topUri = TOP_LEVEL_DOMAIN) {
  httpserver.registerPathHandler("/", null);
  httpserver.start(-1);

  const loadingPrincipal =
    Services.scriptSecurityManager.createContentPrincipal(
      NetUtil.newURI(topUri),
      {}
    );

  const channel = NetUtil.newChannel({
    uri: `${uri}:${httpserver.identity.primaryPort}`,
    loadingPrincipal,
    securityFlags: Ci.nsILoadInfo.SEC_ALLOW_CROSS_ORIGIN_SEC_CONTEXT_IS_NULL,
    contentPolicyType: Ci.nsIContentPolicy.TYPE_OTHER,
  });

  const internalChannel = channel.QueryInterface(Ci.nsIHttpChannelInternal);
  internalChannel.setTopWindowURIIfUnknown(Services.io.newURI(topUri));

  return channel;
}
+
/**
 * Resolve once "urlclassifier-before-block-channel" fires, after asserting
 * that the blocked channel's reason and url match |expected|.
 * |callback|, if given, is invoked with the nsIUrlClassifierBlockedChannel
 * before the promise resolves (so tests can call replace()/allow() on it).
 */
function waitForBeforeBlockEvent(expected, callback) {
  return new Promise(function (resolve) {
    let observer = function observe(aSubject, aTopic, aData) {
      switch (aTopic) {
        case "urlclassifier-before-block-channel":
          let channel = aSubject.QueryInterface(
            Ci.nsIUrlClassifierBlockedChannel
          );
          Assert.equal(
            channel.reason,
            expected.reason,
            "verify blocked reason"
          );
          Assert.equal(
            channel.url,
            expected.url,
            "verify url of blocked channel"
          );

          if (callback) {
            callback(channel);
          }

          // One-shot: stop observing after the first matching event.
          service.removeListener(observer);
          resolve(channel);
          break;
      }
    };

    let service = Cc[
      "@mozilla.org/url-classifier/channel-classifier-service;1"
    ].getService(Ci.nsIChannelClassifierService);
    service.addListener(observer);
  });
}
+
// Default behavior: a channel classified as social tracking is canceled
// with NS_ERROR_SOCIALTRACKING_URI when no listener intervenes.
add_task(async function test_block_channel() {
  Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
  await UrlClassifierTestUtils.addTestTrackers();

  let channel = setupChannel(TRACKER_DOMAIN);

  let blockPromise = waitForBeforeBlockEvent(
    {
      reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
      url: channel.URI.spec,
    },
    null
  );

  let openPromise = new Promise((resolve, reject) => {
    channel.asyncOpen({
      onStartRequest: (request, context) => {},
      onDataAvailable: (request, context, stream, offset, count) => {},
      onStopRequest: (request, status) => {
        dump("status = " + status + "\n");
        // NOTE(review): |status| is an nsresult, not an HTTP status; the
        // comparison against 200 looks dubious — the else branch carries
        // the real assertion. Confirm intent.
        if (status == 200) {
          Assert.ok(false, "Should not successfully open the channel");
        } else {
          Assert.equal(
            status,
            Cr.NS_ERROR_SOCIALTRACKING_URI,
            "Should fail to open the channel"
          );
        }
        resolve();
      },
    });
  });

  // wait for block event from url-classifier
  await blockPromise;

  // wait for onStopRequest callback from AsyncOpen
  await openPromise;

  // clean up
  UrlClassifierTestUtils.cleanupTestTrackers();
  Services.prefs.clearUserPref(FEATURE_STP_PREF);
  httpserver.stop();
});
+
// Calling replace() from the before-block listener lets the load proceed:
// the request then fails with NS_ERROR_UNKNOWN_HOST (the tracker host does
// not resolve) instead of being canceled by the classifier.
add_task(async function test_unblock_channel() {
  Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
  //Services.prefs.setBoolPref("network.dns.native-is-localhost", true);

  await UrlClassifierTestUtils.addTestTrackers();

  let channel = setupChannel(TRACKER_DOMAIN);

  let blockPromise = waitForBeforeBlockEvent(
    {
      reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
      url: channel.URI.spec,
    },
    ch => {
      ch.replace();
    }
  );

  let openPromise = new Promise((resolve, reject) => {
    channel.asyncOpen({
      onStartRequest: (request, context) => {},
      onDataAvailable: (request, context, stream, offset, count) => {},
      onStopRequest: (request, status) => {
        if (status == Cr.NS_ERROR_SOCIALTRACKING_URI) {
          Assert.ok(false, "Classifier should not cancel this channel");
        } else {
          // This request is supposed to fail, but we need to ensure it
          // is not canceled by url-classifier
          Assert.equal(
            status,
            Cr.NS_ERROR_UNKNOWN_HOST,
            "Not cancel by classifier"
          );
        }
        resolve();
      },
    });
  });

  // wait for block event from url-classifier
  await blockPromise;

  // wait for onStopRequest callback from AsyncOpen
  await openPromise;

  // clean up
  UrlClassifierTestUtils.cleanupTestTrackers();
  Services.prefs.clearUserPref(FEATURE_STP_PREF);
  httpserver.stop();
});
+
// Calling allow() from the before-block listener also lets the load
// proceed; as in test_unblock_channel the request fails with
// NS_ERROR_UNKNOWN_HOST rather than a classifier cancellation.
add_task(async function test_allow_channel() {
  Services.prefs.setBoolPref(FEATURE_STP_PREF, true);
  //Services.prefs.setBoolPref("network.dns.native-is-localhost", true);

  await UrlClassifierTestUtils.addTestTrackers();

  let channel = setupChannel(TRACKER_DOMAIN);

  let blockPromise = waitForBeforeBlockEvent(
    {
      reason: Ci.nsIUrlClassifierBlockedChannel.SOCIAL_TRACKING_PROTECTION,
      url: channel.URI.spec,
    },
    ch => {
      ch.allow();
    }
  );

  let openPromise = new Promise((resolve, reject) => {
    channel.asyncOpen({
      onStartRequest: (request, context) => {},
      onDataAvailable: (request, context, stream, offset, count) => {},
      onStopRequest: (request, status) => {
        if (status == Cr.NS_ERROR_SOCIALTRACKING_URI) {
          Assert.ok(false, "Classifier should not cancel this channel");
        } else {
          // This request is supposed to fail, but we need to ensure it
          // is not canceled by url-classifier
          Assert.equal(
            status,
            Cr.NS_ERROR_UNKNOWN_HOST,
            "Not cancel by classifier"
          );
        }
        resolve();
      },
    });
  });

  // wait for block event from url-classifier
  await blockPromise;

  // wait for onStopRequest callback from AsyncOpen
  await openPromise;

  // clean up
  UrlClassifierTestUtils.cleanupTestTrackers();
  Services.prefs.clearUserPref(FEATURE_STP_PREF);
  httpserver.stop();
});
diff --git a/toolkit/components/url-classifier/tests/unit/test_dbservice.js b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
new file mode 100644
index 0000000000..70ac02021a
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_dbservice.js
@@ -0,0 +1,329 @@
// Hostname-per-line url chunks used to build the raw update payloads below.
var chunk1Urls = ["test.com/aba", "test.com/foo/bar", "foo.bar.com/a/b/c"];
var chunk1 = chunk1Urls.join("\n");

var chunk2Urls = [
  "blah.com/a",
  "baz.com/",
  "255.255.0.1/",
  "www.foo.com/test2?param=1",
];
var chunk2 = chunk2Urls.join("\n");

var chunk3Urls = ["test.com/a", "foo.bar.com/a", "blah.com/a"];
var chunk3 = chunk3Urls.join("\n");

// Sub entries name the add chunk they remove from: "<addChunkNum>:<url>".
var chunk3SubUrls = ["1:test.com/a", "1:foo.bar.com/a", "2:blah.com/a"];
var chunk3Sub = chunk3SubUrls.join("\n");

var chunk4Urls = ["a.com/b", "b.com/c"];
var chunk4 = chunk4Urls.join("\n");

var chunk5Urls = ["d.com/e", "f.com/g"];
var chunk5 = chunk5Urls.join("\n");

var chunk6Urls = ["h.com/i", "j.com/k"];
var chunk6 = chunk6Urls.join("\n");

var chunk7Urls = ["l.com/m", "n.com/o"];
var chunk7 = chunk7Urls.join("\n");

// we are going to add chunks 1, 2, 4, 5, and 6 to phish-simple,
// chunk 2 to malware-simple, and chunk 3 to unwanted-simple,
// and chunk 7 to block-simple.
// Then we'll remove the urls in chunk3 from phish-simple, then
// expire chunk 1 and chunks 4-7 from phish-simple.
// Maps of url -> true recording which lookups must (or must not) hit each
// table once all updates have been applied.
var phishExpected = {};
var phishUnexpected = {};
var malwareExpected = {};
var unwantedExpected = {};
var blockedExpected = {};
for (let i = 0; i < chunk2Urls.length; i++) {
  phishExpected[chunk2Urls[i]] = true;
  malwareExpected[chunk2Urls[i]] = true;
}
for (let i = 0; i < chunk3Urls.length; i++) {
  unwantedExpected[chunk3Urls[i]] = true;
  // chunk3 urls get subbed out of phish-simple, so they move to unexpected.
  delete phishExpected[chunk3Urls[i]];
  phishUnexpected[chunk3Urls[i]] = true;
}
for (let i = 0; i < chunk1Urls.length; i++) {
  // chunk1 urls are expired
  phishUnexpected[chunk1Urls[i]] = true;
}
for (let i = 0; i < chunk4Urls.length; i++) {
  // chunk4 urls are expired
  phishUnexpected[chunk4Urls[i]] = true;
}
for (let i = 0; i < chunk5Urls.length; i++) {
  // chunk5 urls are expired
  phishUnexpected[chunk5Urls[i]] = true;
}
for (let i = 0; i < chunk6Urls.length; i++) {
  // chunk6 urls are expired
  phishUnexpected[chunk6Urls[i]] = true;
}
for (let i = 0; i < chunk7Urls.length; i++) {
  blockedExpected[chunk7Urls[i]] = true;
  // chunk7 urls are expired
  phishUnexpected[chunk7Urls[i]] = true;
}

// Check that the entries hit based on sub-parts
phishExpected["baz.com/foo/bar"] = true;
phishExpected["foo.bar.baz.com/foo"] = true;
phishExpected["bar.baz.com/"] = true;

// Count of outstanding async lookups; decremented via checkDone().
var numExpecting;
+
// Generic failure callback for update calls: abort the whole test run.
function testFailure(arg) {
  do_throw(arg);
}
+
// Looking up a no-host uri such as a data: uri should throw an exception.
// This is the final check of the file, so it also releases the harness.
function checkNoHost() {
  let threw = false;
  try {
    const principal = Services.scriptSecurityManager.createContentPrincipal(
      Services.io.newURI("data:text/html,<b>test</b>"),
      {}
    );
    dbservice.lookup(principal, allTables);
  } catch (e) {
    threw = true;
  }
  Assert.ok(threw);

  do_test_finished();
}
+
// getTables callback after the sub chunk was expired: the "s:3" component
// must be gone from phish-simple's metadata.
function tablesCallbackWithoutSub(tables) {
  var parts = tables.split("\n");
  parts.sort();

  // there's a leading \n here because splitting left an empty string
  // after the trailing newline, which will sort first
  Assert.equal(
    parts.join("\n"),
    "\ntest-block-simple;a:1\ntest-malware-simple;a:1\ntest-phish-simple;a:2\ntest-unwanted-simple;a:1"
  );

  checkNoHost();
}
+
// After expiring the sub chunk, re-read the table metadata to confirm the
// sub component was removed.
function expireSubSuccess(result) {
  dbservice.getTables(tablesCallbackWithoutSub);
}
+
// getTables callback after adds and subs were applied: all four tables are
// present, and phish-simple reports both its add chunk and sub chunk 3.
function tablesCallbackWithSub(tables) {
  var parts = tables.split("\n");

  let expectedChunks = [
    "test-block-simple;a:1",
    "test-malware-simple;a:1",
    "test-phish-simple;a:2:s:3",
    "test-unwanted-simple;a:1",
  ];
  for (let chunk of expectedChunks) {
    Assert.ok(parts.includes(chunk));
  }

  // verify that expiring a sub chunk removes its name from the list
  var data = "n:1000\ni:test-phish-simple\nsd:3\n";

  doSimpleUpdate(data, expireSubSuccess, testFailure);
}
+
// Once all lookups are done, verify the table metadata (adds + subs).
function checkChunksWithSub() {
  dbservice.getTables(tablesCallbackWithSub);
}
+
// Called after each lookup callback; once every outstanding lookup has
// reported, move on to verifying the table metadata.
function checkDone() {
  // Strict comparison; numExpecting is always a number here.
  if (--numExpecting === 0) {
    checkChunksWithSub();
  }
}
+
// Lookup callback: this url must hit the phishing table.
function phishExists(result) {
  dumpn("phishExists: " + result);
  try {
    Assert.ok(result.includes("test-phish-simple"));
  } finally {
    // Always count the lookup, even when the assertion throws.
    checkDone();
  }
}
+
// Lookup callback: this url must NOT hit the phishing table.
function phishDoesntExist(result) {
  dumpn("phishDoesntExist: " + result);
  try {
    Assert.ok(!result.includes("test-phish-simple"));
  } finally {
    // Always count the lookup, even when the assertion throws.
    checkDone();
  }
}
+
// Lookup callback: this url must hit the malware table.
function malwareExists(result) {
  dumpn("malwareExists: " + result);

  try {
    Assert.ok(result.includes("test-malware-simple"));
  } finally {
    checkDone();
  }
}
+
// Lookup callback: this url must hit the unwanted-software table.
function unwantedExists(result) {
  dumpn("unwantedExists: " + result);

  try {
    Assert.ok(result.includes("test-unwanted-simple"));
  } finally {
    checkDone();
  }
}
+
// Lookup callback: this url must hit the block table.
function blockedExists(result) {
  dumpn("blockedExists: " + result);

  try {
    Assert.ok(result.includes("test-block-simple"));
  } finally {
    checkDone();
  }
}
+
// Fire an async lookup for every url key in each expectation map and count
// how many callbacks we are waiting for. Each callback reports through
// checkDone(), which advances the test once everything has come back.
// (Refactored: the original repeated the same lookup loop five times.)
function checkState() {
  numExpecting = 0;

  // Queue one dbservice lookup per key of |expectedMap|, reporting the
  // result through |handler|, and bump the outstanding-lookup counter.
  function lookupAll(expectedMap, handler) {
    for (let key in expectedMap) {
      let principal = Services.scriptSecurityManager.createContentPrincipal(
        Services.io.newURI("http://" + key),
        {}
      );
      dbservice.lookup(principal, allTables, handler, true);
      numExpecting++;
    }
  }

  lookupAll(phishExpected, phishExists);
  lookupAll(phishUnexpected, phishDoesntExist);
  lookupAll(malwareExpected, malwareExists);
  lookupAll(unwantedExpected, unwantedExists);
  lookupAll(blockedExpected, blockedExists);
}
+
// Success callback for the sub update: the server timestamp round-trips,
// then all lookups are verified.
function testSubSuccess(result) {
  Assert.equal(result, "1000");
  checkState();
}
+
// Apply a sub chunk (s:3) removing the chunk3 urls from their add chunks,
// then expire add chunk 1 and add chunks 4-6.
function do_subs() {
  const data = [
    "n:1000",
    "i:test-phish-simple",
    "s:3:32:" + chunk3Sub.length,
    chunk3Sub,
    "ad:1",
    "ad:4-6",
    "", // trailing newline
  ].join("\n");

  doSimpleUpdate(data, testSubSuccess, testFailure);
}
+
// Success callback for the initial add update; proceed to the subs.
function testAddSuccess(arg) {
  Assert.equal(arg, "1000");

  do_subs();
}
+
function do_adds() {
  // This test relies on the fact that only -regexp tables are ungzipped,
  // and only -hash tables are assumed to be pre-md5'd. So we use
  // a 'simple' table type to get simple hostname-per-line semantics.

  // Serialize one "a:<num>:32:<len>\n<data>\n" add-chunk record.
  const addChunk = (num, chunkData) =>
    "a:" + num + ":32:" + chunkData.length + "\n" + chunkData + "\n";

  const data =
    "n:1000\n" +
    "i:test-phish-simple\n" +
    addChunk(1, chunk1) +
    addChunk(2, chunk2) +
    addChunk(4, chunk4) +
    addChunk(5, chunk5) +
    addChunk(6, chunk6) +
    "i:test-malware-simple\n" +
    addChunk(1, chunk2) +
    "i:test-unwanted-simple\n" +
    addChunk(1, chunk3) +
    "i:test-block-simple\n" +
    addChunk(1, chunk7);

  doSimpleUpdate(data, testAddSuccess, testFailure);
}
+
// Entry point: kick off the add updates; everything else chains from the
// success callbacks. The harness stays pending until checkNoHost() ends it.
function run_test() {
  do_adds();
  do_test_pending();
}
diff --git a/toolkit/components/url-classifier/tests/unit/test_digest256.js b/toolkit/components/url-classifier/tests/unit/test_digest256.js
new file mode 100644
index 0000000000..f96f13f7d1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_digest256.js
@@ -0,0 +1,143 @@
+// Global test server for serving safebrowsing updates.
+var gHttpServ = null;
+// Global nsIUrlClassifierDBService
+var gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIUrlClassifierDBService
+);
+
+// A map of tables to arrays of update redirect urls.
+var gTables = {};
+
+// Registers a table for which to serve update chunks. Returns a promise that
+// resolves when that chunk has been downloaded.
+function registerTableUpdate(aTable, aFilename) {
+ return new Promise(resolve => {
+ // If we haven't been given an update for this table yet, add it to the map
+ if (!(aTable in gTables)) {
+ gTables[aTable] = [];
+ }
+
+ // The number of chunks associated with this table.
+ let numChunks = gTables[aTable].length + 1;
+ let redirectPath = "/" + aTable + "-" + numChunks;
+ let redirectUrl = "localhost:4444" + redirectPath;
+
+ // Store redirect url for that table so we can return it later when we
+ // process an update request.
+ gTables[aTable].push(redirectUrl);
+
+ gHttpServ.registerPathHandler(redirectPath, function (request, response) {
+ info("Mock safebrowsing server handling request for " + redirectPath);
+ let contents = readFileToString(aFilename);
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(contents, contents.length);
+ resolve(contents);
+ });
+ });
+}
+
+// Construct a response with redirect urls.
+function processUpdateRequest() {
+ let response = "n:1000\n";
+ for (let table in gTables) {
+ response += "i:" + table + "\n";
+ for (let i = 0; i < gTables[table].length; ++i) {
+ response += "u:" + gTables[table][i] + "\n";
+ }
+ }
+ info("Returning update response: " + response);
+ return response;
+}
+
+// Set up our test server to handle update requests.
+function run_test() {
+ gHttpServ = new HttpServer();
+ gHttpServ.registerDirectory("/", do_get_cwd());
+
+ gHttpServ.registerPathHandler("/downloads", function (request, response) {
+ let blob = processUpdateRequest();
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(blob, blob.length);
+ });
+
+ gHttpServ.start(4444);
+ run_next_test();
+}
+
+// Just throw if we ever get an update or download error.
+function handleError(aEvent) {
+ do_throw("We didn't download or update correctly: " + aEvent);
+}
+
+add_test(function test_update() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Load up some update chunks for the safebrowsing server to serve.
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest1.chunk");
+ registerTableUpdate("goog-downloadwhite-digest256", "data/digest2.chunk");
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccess(aEvent) {
+ // Timeout of n:1000 is constructed in processUpdateRequest above and
+ // passed back in the callback in nsIUrlClassifierStreamUpdater on success.
+ Assert.equal("1000", aEvent);
+ info("All data processed");
+ run_next_test();
+ }
+ streamUpdater.downloadUpdates(
+ "goog-downloadwhite-digest256",
+ "goog-downloadwhite-digest256;\n",
+ true,
+ "http://localhost:4444/downloads",
+ updateSuccess,
+ handleError,
+ handleError
+ );
+});
+
+add_test(function test_url_not_whitelisted() {
+ let uri = Services.io.newURI("http://example.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ // This URI is not on any lists.
+ Assert.equal("", aEvent);
+ run_next_test();
+ }
+ );
+});
+
+add_test(function test_url_whitelisted() {
+ // Hash of "whitelisted.com/" (canonicalized URL) is:
+ // 93CA5F48E15E9861CD37C2D95DB43D23CC6E6DE5C3F8FA6E8BE66F97CC518907
+ let uri = Services.io.newURI("http://whitelisted.com");
+ let principal = Services.scriptSecurityManager.createContentPrincipal(
+ uri,
+ {}
+ );
+ gDbService.lookup(
+ principal,
+ "goog-downloadwhite-digest256",
+ function handleEvent(aEvent) {
+ Assert.equal("goog-downloadwhite-digest256", aEvent);
+ run_next_test();
+ }
+ );
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js b/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js
new file mode 100644
index 0000000000..218aec4934
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_exceptionListService.js
@@ -0,0 +1,285 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIUrlClassifierExceptionListService implementation. */
+
+const { RemoteSettings } = ChromeUtils.importESModule(
+ "resource://services-settings/remote-settings.sys.mjs"
+);
+
+const COLLECTION_NAME = "url-classifier-skip-urls";
+const FEATURE_TRACKING_NAME = "tracking-annotation-test";
+const FEATURE_TRACKING_PREF_NAME = "urlclassifier.tracking-annotation-test";
+const FEATURE_SOCIAL_NAME = "socialtracking-annotation-test";
+const FEATURE_SOCIAL_PREF_NAME = "urlclassifier.socialtracking-annotation-test";
+const FEATURE_FINGERPRINTING_NAME = "fingerprinting-annotation-test";
+const FEATURE_FINGERPRINTING_PREF_NAME =
+ "urlclassifier.fingerprinting-annotation-test";
+
+do_get_profile();
+
+class UpdateEvent extends EventTarget {}
+function waitForEvent(element, eventName) {
+ return new Promise(function (resolve) {
+ element.addEventListener(eventName, e => resolve(e.detail), { once: true });
+ });
+}
+
+add_task(async function test_list_changes() {
+ let exceptionListService = Cc[
+ "@mozilla.org/url-classifier/exception-list-service;1"
+ ].getService(Ci.nsIUrlClassifierExceptionListService);
+
+ // Make sure we have a pref initially, since the exception list service
+ // requires it.
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "");
+
+ let updateEvent = new UpdateEvent();
+ let obs = data => {
+ let event = new CustomEvent("update", { detail: data });
+ updateEvent.dispatchEvent(event);
+ };
+
+ let records = [
+ {
+ id: "1",
+ last_modified: 1000000000000001,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "example.com",
+ },
+ ];
+
+ // Add some initial data.
+ let db = RemoteSettings(COLLECTION_NAME).db;
+ await db.importChanges({}, Date.now(), records);
+ let promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ FEATURE_TRACKING_PREF_NAME,
+ obs
+ );
+
+ Assert.equal(await promise, "", "No items in the list");
+
+ // Second event is from the RemoteSettings record.
+ let list = await waitForEvent(updateEvent, "update");
+ Assert.equal(list, "example.com", "Has one item in the list");
+
+ records.push(
+ {
+ id: "2",
+ last_modified: 1000000000000002,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "MOZILLA.ORG",
+ },
+ {
+ id: "3",
+ last_modified: 1000000000000003,
+ feature: "some-other-feature",
+ pattern: "noinclude.com",
+ },
+ {
+ last_modified: 1000000000000004,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "*.example.org",
+ }
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ await RemoteSettings(COLLECTION_NAME).emit("sync", {
+ data: { current: records },
+ });
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "test.com");
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "test.com,example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ promise = waitForEvent(updateEvent, "update");
+
+ Services.prefs.setStringPref(
+ FEATURE_TRACKING_PREF_NAME,
+ "test.com,whatever.com,*.abc.com"
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "test.com,whatever.com,*.abc.com,example.com,mozilla.org,*.example.org",
+ "Has several items in the list"
+ );
+
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ obs
+ );
+ exceptionListService.clear();
+
+ await db.clear();
+});
+
+/**
+ * This test makes sure that when a feature registers itself with the exception list service,
+ * it can get the correct initial data.
+ */
+add_task(async function test_list_init_data() {
+ let exceptionListService = Cc[
+ "@mozilla.org/url-classifier/exception-list-service;1"
+ ].getService(Ci.nsIUrlClassifierExceptionListService);
+
+ // Make sure we have a pref initially, since the exception list service
+ // requires it.
+ Services.prefs.setStringPref(FEATURE_TRACKING_PREF_NAME, "");
+
+ let updateEvent = new UpdateEvent();
+
+ let records = [
+ {
+ id: "1",
+ last_modified: 1000000000000001,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "tracking.example.com",
+ },
+ {
+ id: "2",
+ last_modified: 1000000000000002,
+ feature: FEATURE_SOCIAL_NAME,
+ pattern: "social.example.com",
+ },
+ {
+ id: "3",
+ last_modified: 1000000000000003,
+ feature: FEATURE_TRACKING_NAME,
+ pattern: "*.tracking.org",
+ },
+ {
+ id: "4",
+ last_modified: 1000000000000004,
+ feature: FEATURE_SOCIAL_NAME,
+ pattern: "MOZILLA.ORG",
+ },
+ ];
+
+ // Add some initial data.
+ let db = RemoteSettings(COLLECTION_NAME).db;
+ await db.importChanges({}, Date.now(), records);
+
+ // The first registered feature makes ExceptionListService get the initial data
+ // from remote setting.
+ let promise = waitForEvent(updateEvent, "update");
+
+ let obs = data => {
+ let event = new CustomEvent("update", { detail: data });
+ updateEvent.dispatchEvent(event);
+ };
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ FEATURE_TRACKING_PREF_NAME,
+ obs
+ );
+
+ let list = await promise;
+ Assert.equal(list, "", "Empty list initially");
+
+ Assert.equal(
+ await waitForEvent(updateEvent, "update"),
+ "tracking.example.com,*.tracking.org",
+ "Has several items in the list"
+ );
+
+ // Register another feature after ExceptionListService got the initial data.
+ promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_SOCIAL_NAME,
+ FEATURE_SOCIAL_PREF_NAME,
+ obs
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "social.example.com,mozilla.org",
+ "Has several items in the list"
+ );
+
+ // Test registering a feature after ExceptionListService received the synced data.
+ records.push(
+ {
+ id: "5",
+ last_modified: 1000000000000002,
+ feature: FEATURE_FINGERPRINTING_NAME,
+ pattern: "fingerprinting.example.com",
+ },
+ {
+ id: "6",
+ last_modified: 1000000000000002,
+ feature: "other-fature",
+ pattern: "not-a-fingerprinting.example.com",
+ },
+ {
+ id: "7",
+ last_modified: 1000000000000002,
+ feature: FEATURE_FINGERPRINTING_NAME,
+ pattern: "*.fingerprinting.org",
+ }
+ );
+
+ await RemoteSettings(COLLECTION_NAME).emit("sync", {
+ data: { current: records },
+ });
+
+ promise = waitForEvent(updateEvent, "update");
+
+ exceptionListService.registerAndRunExceptionListObserver(
+ FEATURE_FINGERPRINTING_NAME,
+ FEATURE_FINGERPRINTING_PREF_NAME,
+ obs
+ );
+
+ list = await promise;
+
+ Assert.equal(
+ list,
+ "fingerprinting.example.com,*.fingerprinting.org",
+ "Has several items in the list"
+ );
+
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_TRACKING_NAME,
+ obs
+ );
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_SOCIAL_NAME,
+ obs
+ );
+ exceptionListService.unregisterExceptionListObserver(
+ FEATURE_FINGERPRINTING_NAME,
+ obs
+ );
+ exceptionListService.clear();
+
+ await db.clear();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_features.js b/toolkit/components/url-classifier/tests/unit/test_features.js
new file mode 100644
index 0000000000..07b22b1b21
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_features.js
@@ -0,0 +1,81 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+*/
+
+"use strict";
+
+add_test(async _ => {
+ ok(
+ Services.cookies,
+ "Force the cookie service to be initialized to avoid issues later. " +
+ "See https://bugzilla.mozilla.org/show_bug.cgi?id=1621759#c3"
+ );
+
+ let classifier = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+ Ci.nsIURIClassifier
+ );
+ ok(!!classifier, "We have the URI-Classifier");
+
+ var tests = [
+ { name: "a", expectedResult: false },
+ { name: "tracking-annotation", expectedResult: true },
+ { name: "tracking-protection", expectedResult: true },
+ ];
+
+ tests.forEach(test => {
+ let feature;
+ try {
+ feature = classifier.getFeatureByName(test.name);
+ } catch (e) {}
+
+ equal(
+ !!feature,
+ test.expectedResult,
+ "Expected result for: " + test.name
+ );
+ if (feature) {
+ equal(feature.name, test.name, "Feature name matches");
+ }
+ });
+
+ let uri = Services.io.newURI("https://example.com");
+
+ let feature = classifier.getFeatureByName("tracking-protection");
+
+ let results = await new Promise(resolve => {
+ classifier.asyncClassifyLocalWithFeatures(
+ uri,
+ [feature],
+ Ci.nsIUrlClassifierFeature.blocklist,
+ r => {
+ resolve(r);
+ }
+ );
+ });
+ equal(results.length, 0, "No tracker");
+
+ Services.prefs.setCharPref(
+ "urlclassifier.trackingTable.testEntries",
+ "example.com"
+ );
+
+ feature = classifier.getFeatureByName("tracking-protection");
+
+ results = await new Promise(resolve => {
+ classifier.asyncClassifyLocalWithFeatures(
+ uri,
+ [feature],
+ Ci.nsIUrlClassifierFeature.blocklist,
+ r => {
+ resolve(r);
+ }
+ );
+ });
+ equal(results.length, 1, "Tracker");
+ let result = results[0];
+ equal(result.feature.name, "tracking-protection", "Correct feature");
+ equal(result.list, "tracking-blocklist-pref", "Correct list");
+
+ Services.prefs.clearUserPref("browser.safebrowsing.password.enabled");
+ run_next_test();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
new file mode 100644
index 0000000000..b8d6c7b128
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter.js
@@ -0,0 +1,438 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+// This test ensures that the nsIUrlClassifierHashCompleter works as expected
+// and simulates an HTTP server to provide completions.
+//
+// In order to test completions, each group of completions sent as one request
+// to the HTTP server is called a completion set. There is currently no
+// support for multiple requests being sent to the server at once, in this test.
+// This test makes a request for each element of |completionSets|, waits for
+// a response and then moves to the next element.
+// Each element of |completionSets| is an array of completions, and each
+// completion is an object with the properties:
+// hash: complete hash for the completion. Automatically right-padded
+// to be COMPLETE_LENGTH.
+// expectCompletion: boolean indicating whether the server should respond
+// with a full hash.
+// forceServerError: boolean indicating whether the server should respond
+// with a 503.
+// table: name of the table that the hash corresponds to. Only needs to be set
+// if a completion is expected.
+// chunkId: positive integer corresponding to the chunk that the hash belongs
+// to. Only needs to be set if a completion is expected.
+// multipleCompletions: boolean indicating whether the server should respond
+// with more than one full hash. If this is set to true
+// then |expectCompletion| must also be set to true and
+// |hash| must have the same prefix as all |completions|.
+// completions: an array of completions (objects with a hash, table and
+// chunkId property as described above). This property is only
+// used when |multipleCompletions| is set to true.
+
+// Basic prefixes with 2/3 completions.
+var basicCompletionSet = [
+ {
+ hash: "abcdefgh",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1234,
+ },
+ {
+ hash: "1234",
+ expectCompletion: false,
+ },
+ {
+ hash: "\u0000\u0000\u000012312",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1234,
+ },
+];
+
+// 3 prefixes with 0 completions to test HashCompleter handling a 204 status.
+var falseCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: false,
+ },
+ {
+ hash: "",
+ expectCompletion: false,
+ },
+ {
+ hash: "abc",
+ expectCompletion: false,
+ },
+];
+
+// The current implementation (as of Mar 2011) sometimes sends duplicate
+// entries to HashCompleter and even expects responses for duplicated entries.
+var dupedCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1,
+ },
+ {
+ hash: "5678",
+ expectCompletion: false,
+ table: "test2",
+ chunkId: 2,
+ },
+ {
+ hash: "1234",
+ expectCompletion: true,
+ table: "test",
+ chunkId: 1,
+ },
+ {
+ hash: "5678",
+ expectCompletion: false,
+ table: "test2",
+ chunkId: 2,
+ },
+];
+
+// It is possible for a hash completion request to return with multiple
+// completions, the HashCompleter should return all of these.
+var multipleResponsesCompletionSet = [
+ {
+ hash: "1234",
+ expectCompletion: true,
+ multipleCompletions: true,
+ completions: [
+ {
+ hash: "123456",
+ table: "test1",
+ chunkId: 3,
+ },
+ {
+ hash: "123478",
+ table: "test2",
+ chunkId: 4,
+ },
+ ],
+ },
+];
+
+function buildCompletionRequest(aCompletionSet) {
+ let prefixes = [];
+ let prefixSet = new Set();
+ aCompletionSet.forEach(s => {
+ let prefix = s.hash.substring(0, 4);
+ if (prefixSet.has(prefix)) {
+ return;
+ }
+ prefixSet.add(prefix);
+ prefixes.push(prefix);
+ });
+ return 4 + ":" + 4 * prefixes.length + "\n" + prefixes.join("");
+}
+
+function parseCompletionRequest(aRequest) {
+ // Format: [partial_length]:[num_of_prefix * partial_length]\n[prefixes_data]
+
+ let tokens = /(\d):(\d+)/.exec(aRequest);
+ if (tokens.length < 3) {
+ dump("Request format error.");
+ return null;
+ }
+
+ let partialLength = parseInt(tokens[1]);
+
+ let payloadStart =
+ tokens[1].length + // partial length
+ 1 + // ':'
+ tokens[2].length + // payload length
+ 1; // '\n'
+
+ let prefixSet = [];
+ for (let i = payloadStart; i < aRequest.length; i += partialLength) {
+ let prefix = aRequest.substr(i, partialLength);
+ if (prefix.length !== partialLength) {
+ dump("Header info not correct: " + aRequest.substr(0, payloadStart));
+ return null;
+ }
+ prefixSet.push(prefix);
+ }
+ prefixSet.sort();
+
+ return prefixSet;
+}
+
+// Compare the requests in string format.
+function compareCompletionRequest(aRequest1, aRequest2) {
+ let prefixSet1 = parseCompletionRequest(aRequest1);
+ let prefixSet2 = parseCompletionRequest(aRequest2);
+
+ return equal(JSON.stringify(prefixSet1), JSON.stringify(prefixSet2));
+}
+
+// The fifth completion set is added at runtime by getRandomCompletionSet.
+// Each completion in the set only has one response and its purpose is to
+// provide an easy way to test the HashCompleter handling an arbitrarily large
+// completion set (determined by SIZE_OF_RANDOM_SET).
+const SIZE_OF_RANDOM_SET = 16;
+function getRandomCompletionSet(forceServerError) {
+ let completionSet = [];
+ let hashPrefixes = [];
+
+ let seed = Math.floor(Math.random() * Math.pow(2, 32));
+ dump("Using seed of " + seed + " for random completion set.\n");
+ let rand = new LFSRgenerator(seed);
+
+ for (let i = 0; i < SIZE_OF_RANDOM_SET; i++) {
+ let completion = {
+ expectCompletion: false,
+ forceServerError: false,
+ _finished: false,
+ };
+
+ // Generate a random 256 bit hash. First we get a random number and then
+ // convert it to a string.
+ let hash;
+ let prefix;
+ do {
+ hash = "";
+ let length = 1 + rand.nextNum(5);
+ for (let j = 0; j < length; j++) {
+ hash += String.fromCharCode(rand.nextNum(8));
+ }
+ prefix = hash.substring(0, 4);
+ } while (hashPrefixes.includes(prefix));
+
+ hashPrefixes.push(prefix);
+ completion.hash = hash;
+
+ if (!forceServerError) {
+ completion.expectCompletion = rand.nextNum(1) == 1;
+ } else {
+ completion.forceServerError = true;
+ }
+ if (completion.expectCompletion) {
+ // Generate a random alpha-numeric string that starts with "test" for the
+ // table name.
+ completion.table = "test" + rand.nextNum(31).toString(36);
+
+ completion.chunkId = rand.nextNum(16);
+ }
+ completionSet.push(completion);
+ }
+
+ return completionSet;
+}
+
+var completionSets = [
+ basicCompletionSet,
+ falseCompletionSet,
+ dupedCompletionSet,
+ multipleResponsesCompletionSet,
+];
+var currentCompletionSet = -1;
+var finishedCompletions = 0;
+
+const SERVER_PATH = "/hash-completer";
+var server;
+
+// Completion hashes are automatically right-padded with null chars to have a
+// length of COMPLETE_LENGTH.
+// Taken from nsUrlClassifierDBService.h
+const COMPLETE_LENGTH = 32;
+
+var completer = Cc["@mozilla.org/url-classifier/hashcompleter;1"].getService(
+ Ci.nsIUrlClassifierHashCompleter
+);
+
+var gethashUrl;
+
+// Expected highest completion set for which the server sends a response.
+var expectedMaxServerCompletionSet = 0;
+var maxServerCompletionSet = 0;
+
+function run_test() {
+ // This test case exercises the backoff functionality so we can't leave it disabled.
+ Services.prefs.setBoolPref(
+ "browser.safebrowsing.provider.test.disableBackoff",
+ false
+ );
+ // Generate a random completion set that return successful responses.
+ completionSets.push(getRandomCompletionSet(false));
+ // We backoff after receiving an error, so requests shouldn't reach the
+ // server after that.
+ expectedMaxServerCompletionSet = completionSets.length;
+ // Generate some completion sets that return 503s.
+ for (let j = 0; j < 10; ++j) {
+ completionSets.push(getRandomCompletionSet(true));
+ }
+
+ // Fix up the completions before running the test.
+ for (let completionSet of completionSets) {
+ for (let completion of completionSet) {
+ // Pad the right of each |hash| so that the length is COMPLETE_LENGTH.
+ if (completion.multipleCompletions) {
+ for (let responseCompletion of completion.completions) {
+ let numChars = COMPLETE_LENGTH - responseCompletion.hash.length;
+ responseCompletion.hash += new Array(numChars + 1).join("\u0000");
+ }
+ } else {
+ let numChars = COMPLETE_LENGTH - completion.hash.length;
+ completion.hash += new Array(numChars + 1).join("\u0000");
+ }
+ }
+ }
+ do_test_pending();
+
+ server = new HttpServer();
+ server.registerPathHandler(SERVER_PATH, hashCompleterServer);
+
+ server.start(-1);
+ const SERVER_PORT = server.identity.primaryPort;
+
+ gethashUrl = "http://localhost:" + SERVER_PORT + SERVER_PATH;
+
+ runNextCompletion();
+}
+
+function runNextCompletion() {
+ // The server relies on currentCompletionSet to send the correct response, so
+ // don't increment it until we start the new set of callbacks.
+ currentCompletionSet++;
+ if (currentCompletionSet >= completionSets.length) {
+ finish();
+ return;
+ }
+
+ dump(
+ "Now on completion set index " +
+ currentCompletionSet +
+ ", length " +
+ completionSets[currentCompletionSet].length +
+ "\n"
+ );
+ // Number of finished completions for this set.
+ finishedCompletions = 0;
+ for (let completion of completionSets[currentCompletionSet]) {
+ completer.complete(
+ completion.hash.substring(0, 4),
+ gethashUrl,
+ "test-phish-shavar", // Could be arbitrary v2 table name.
+ new callback(completion)
+ );
+ }
+}
+
+function hashCompleterServer(aRequest, aResponse) {
+ let stream = aRequest.bodyInputStream;
+ let wrapperStream = Cc["@mozilla.org/binaryinputstream;1"].createInstance(
+ Ci.nsIBinaryInputStream
+ );
+ wrapperStream.setInputStream(stream);
+
+ let len = stream.available();
+ let data = wrapperStream.readBytes(len);
+
+ // Check if we got the expected completion request.
+ let expectedRequest = buildCompletionRequest(
+ completionSets[currentCompletionSet]
+ );
+ compareCompletionRequest(data, expectedRequest);
+
+ // To avoid a response with duplicate hash completions, we keep track of all
+ // completed hash prefixes so far.
+ let completedHashes = [];
+ let responseText = "";
+
+ function responseForCompletion(x) {
+ return x.table + ":" + x.chunkId + ":" + x.hash.length + "\n" + x.hash;
+ }
+ // As per the spec, a server should respond with a 204 if there are no
+ // full-length hashes that match the prefixes.
+ let httpStatus = 204;
+ for (let completion of completionSets[currentCompletionSet]) {
+ if (
+ completion.expectCompletion &&
+ !completedHashes.includes(completion.hash)
+ ) {
+ completedHashes.push(completion.hash);
+
+ if (completion.multipleCompletions) {
+ responseText += completion.completions
+ .map(responseForCompletion)
+ .join("");
+ } else {
+ responseText += responseForCompletion(completion);
+ }
+ }
+ if (completion.forceServerError) {
+ httpStatus = 503;
+ }
+ }
+
+ dump("Server sending response for " + currentCompletionSet + "\n");
+ maxServerCompletionSet = currentCompletionSet;
+ if (responseText && httpStatus != 503) {
+ aResponse.write(responseText);
+ } else {
+ aResponse.setStatusLine(null, httpStatus, null);
+ }
+}
+
+function callback(completion) {
+ this._completion = completion;
+}
+
+callback.prototype = {
+ completionV2: function completionV2(hash, table, chunkId, trusted) {
+ Assert.ok(this._completion.expectCompletion);
+ if (this._completion.multipleCompletions) {
+ for (let completion of this._completion.completions) {
+ if (completion.hash == hash) {
+ Assert.equal(JSON.stringify(hash), JSON.stringify(completion.hash));
+ Assert.equal(table, completion.table);
+ Assert.equal(chunkId, completion.chunkId);
+
+ completion._completed = true;
+
+ if (this._completion.completions.every(x => x._completed)) {
+ this._completed = true;
+ }
+
+ break;
+ }
+ }
+ } else {
+ // Hashes are not actually strings and can contain arbitrary data.
+ Assert.equal(JSON.stringify(hash), JSON.stringify(this._completion.hash));
+ Assert.equal(table, this._completion.table);
+ Assert.equal(chunkId, this._completion.chunkId);
+
+ this._completed = true;
+ }
+ },
+
+ completionFinished: function completionFinished(status) {
+ finishedCompletions++;
+ Assert.equal(!!this._completion.expectCompletion, !!this._completed);
+ this._completion._finished = true;
+
+ // currentCompletionSet can mutate before all of the callbacks are complete.
+ if (
+ currentCompletionSet < completionSets.length &&
+ finishedCompletions == completionSets[currentCompletionSet].length
+ ) {
+ runNextCompletion();
+ }
+ },
+};
+
+function finish() {
+ Services.prefs.clearUserPref(
+ "browser.safebrowsing.provider.test.disableBackoff"
+ );
+
+ Assert.equal(expectedMaxServerCompletionSet, maxServerCompletionSet);
+ server.stop(function () {
+ do_test_finished();
+ });
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js b/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
new file mode 100644
index 0000000000..5910acb0eb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_hashcompleter_v4.js
@@ -0,0 +1,292 @@
+const { XPCOMUtils } = ChromeUtils.importESModule(
+ "resource://gre/modules/XPCOMUtils.sys.mjs"
+);
+
+// These tables have a different update URL (for v4).
+const TEST_TABLE_DATA_V4 = {
+ tableName: "test-phish-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4?",
+};
+
+const PREF_NEXTUPDATETIME_V4 =
+ "browser.safebrowsing.provider.google4.nextupdatetime";
+const GETHASH_PATH = "/safebrowsing/gethash-v4";
+
+// The protobuf binary representation of the gethash response:
+// minimumWaitDuration : 12 secs 10 nanosecs
+// negativeCacheDuration : 120 secs 9 nanosecs
+//
+// { CompleteHash, ThreatType, CacheDuration { secs, nanos } };
+// { nsCString("01234567890123456789012345678901"), SOCIAL_ENGINEERING_PUBLIC, { 8, 500 } },
+// { nsCString("12345678901234567890123456789012"), SOCIAL_ENGINEERING_PUBLIC, { 7, 100} },
+// { nsCString("23456789012345678901234567890123"), SOCIAL_ENGINEERING_PUBLIC, { 1, 20 } },
+
+const GETHASH_RESPONSE_CONTENT =
+ "\x0A\x2D\x08\x02\x1A\x22\x0A\x20\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x2A\x05\x08\x08\x10\xF4\x03\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x2A\x04\x08\x07\x10\x64\x0A\x2C\x08\x02\x1A\x22\x0A\x20\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x34\x35\x36\x37\x38\x39\x30\x31\x32\x33\x2A\x04\x08\x01\x10\x14\x12\x04\x08\x0C\x10\x0A\x1A\x04\x08\x78\x10\x09";
+
+// The protobuf binary representation of the update response:
+//
+// [
+// {
+// 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
+// 'response_type': 2, // FULL_UPDATE
+// 'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
+// 'checksum': { "sha256": CHECKSUM }, // CHECKSUM
+// 'additions': { 'compression_type': RAW,
+// 'prefix_size': 4,
+// 'raw_hashes': "00000001000000020000000300000004"}
+// }
+// ]
+//
+const UPDATE_RESPONSE_CONTENT =
+ "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
+const UPDATE_PATH = "/safebrowsing/update";
+
+let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"].getService(
+ Ci.nsIUrlListManager
+);
+
+let gCompleter = Cc["@mozilla.org/url-classifier/hashcompleter;1"].getService(
+ Ci.nsIUrlClassifierHashCompleter
+);
+
+XPCOMUtils.defineLazyServiceGetter(
+ this,
+ "gUrlUtil",
+ "@mozilla.org/url-classifier/utils;1",
+ "nsIUrlClassifierUtils"
+);
+
+// Handles request for TEST_TABLE_DATA_V4.
+let gHttpServV4 = null;
+
+const NEW_CLIENT_STATE = "sta\0te";
+const CHECKSUM =
+ "\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78";
+
+Services.prefs.setBoolPref("browser.safebrowsing.debug", true);
+
+// The "\xFF\xFF" is to generate a base64 string with "/".
+Services.prefs.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
+
+// Register tables.
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4.tableName,
+ TEST_TABLE_DATA_V4.providerName,
+ TEST_TABLE_DATA_V4.updateUrl,
+ TEST_TABLE_DATA_V4.gethashUrl
+);
+
+// This is unfortunately needed since v4 gethash request
+// requires the threat type (table name) as well as the
+// state it's associated with. We have to run the update once
+// to have the state written.
+add_test(function test_update_v4() {
+ gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
+ gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
+
+ // Force table update.
+ Services.prefs.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
+ gListManager.maybeToggleUpdateChecking();
+});
+
+add_test(function test_getHashRequestV4() {
+ let request = gUrlUtil.makeFindFullHashRequestV4(
+ [TEST_TABLE_DATA_V4.tableName],
+ [btoa(NEW_CLIENT_STATE)],
+ [btoa("0123"), btoa("1234567"), btoa("1111")].sort()
+ );
+ registerHandlerGethashV4("&$req=" + request);
+ let completeFinishedCnt = 0;
+
+ gCompleter.complete(
+ "0123",
+ TEST_TABLE_DATA_V4.gethashUrl,
+ TEST_TABLE_DATA_V4.tableName,
+ {
+ completionV4(hash, table, duration, fullhashes) {
+ equal(hash, "0123");
+ equal(table, TEST_TABLE_DATA_V4.tableName);
+ equal(duration, 120);
+ equal(fullhashes.length, 1);
+
+ let match = fullhashes
+ .QueryInterface(Ci.nsIArray)
+ .queryElementAt(0, Ci.nsIFullHashMatch);
+
+ equal(match.fullHash, "01234567890123456789012345678901");
+ equal(match.cacheDuration, 8);
+ info("completion: " + match.fullHash + ", " + table);
+ },
+
+ completionFinished(status) {
+ equal(status, Cr.NS_OK);
+ completeFinishedCnt++;
+ if (3 === completeFinishedCnt) {
+ run_next_test();
+ }
+ },
+ }
+ );
+
+ gCompleter.complete(
+ "1234567",
+ TEST_TABLE_DATA_V4.gethashUrl,
+ TEST_TABLE_DATA_V4.tableName,
+ {
+ completionV4(hash, table, duration, fullhashes) {
+ equal(hash, "1234567");
+ equal(table, TEST_TABLE_DATA_V4.tableName);
+ equal(duration, 120);
+ equal(fullhashes.length, 1);
+
+ let match = fullhashes
+ .QueryInterface(Ci.nsIArray)
+ .queryElementAt(0, Ci.nsIFullHashMatch);
+
+ equal(match.fullHash, "12345678901234567890123456789012");
+ equal(match.cacheDuration, 7);
+ info("completion: " + match.fullHash + ", " + table);
+ },
+
+ completionFinished(status) {
+ equal(status, Cr.NS_OK);
+ completeFinishedCnt++;
+ if (3 === completeFinishedCnt) {
+ run_next_test();
+ }
+ },
+ }
+ );
+
+ gCompleter.complete(
+ "1111",
+ TEST_TABLE_DATA_V4.gethashUrl,
+ TEST_TABLE_DATA_V4.tableName,
+ {
+ completionV4(hash, table, duration, fullhashes) {
+ equal(hash, "1111");
+ equal(table, TEST_TABLE_DATA_V4.tableName);
+ equal(duration, 120);
+ equal(fullhashes.length, 0);
+ },
+
+ completionFinished(status) {
+ equal(status, Cr.NS_OK);
+ completeFinishedCnt++;
+ if (3 === completeFinishedCnt) {
+ run_next_test();
+ }
+ },
+ }
+ );
+});
+
+add_test(function test_minWaitDuration() {
+ let failedComplete = function () {
+ gCompleter.complete(
+ "0123",
+ TEST_TABLE_DATA_V4.gethashUrl,
+ TEST_TABLE_DATA_V4.tableName,
+ {
+ completionFinished(status) {
+ equal(status, Cr.NS_ERROR_ABORT);
+ },
+ }
+ );
+ };
+
+ let successComplete = function () {
+ gCompleter.complete(
+ "1234567",
+ TEST_TABLE_DATA_V4.gethashUrl,
+ TEST_TABLE_DATA_V4.tableName,
+ {
+ completionV4(hash, table, duration, fullhashes) {
+ equal(hash, "1234567");
+ equal(table, TEST_TABLE_DATA_V4.tableName);
+ equal(fullhashes.length, 1);
+
+ let match = fullhashes
+ .QueryInterface(Ci.nsIArray)
+ .queryElementAt(0, Ci.nsIFullHashMatch);
+
+ equal(match.fullHash, "12345678901234567890123456789012");
+ equal(match.cacheDuration, 7);
+ info("completion: " + match.fullHash + ", " + table);
+ },
+
+ completionFinished(status) {
+ equal(status, Cr.NS_OK);
+ run_next_test();
+ },
+ }
+ );
+ };
+
+ let request = gUrlUtil.makeFindFullHashRequestV4(
+ [TEST_TABLE_DATA_V4.tableName],
+ [btoa(NEW_CLIENT_STATE)],
+ [btoa("1234567")]
+ );
+ registerHandlerGethashV4("&$req=" + request);
+
+ // The last gethash response contained a min wait duration 12 secs 10 nano
+ // So subsequent requests can happen only after the min wait duration
+ do_timeout(1000, failedComplete);
+ do_timeout(2000, failedComplete);
+ do_timeout(4000, failedComplete);
+ do_timeout(13000, successComplete);
+});
+
+function registerHandlerGethashV4(aExpectedQuery) {
+ gHttpServV4.registerPathHandler(GETHASH_PATH, null);
+ // V4 gethash handler.
+ gHttpServV4.registerPathHandler(GETHASH_PATH, function (request, response) {
+ equal(request.queryString, aExpectedQuery);
+
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(
+ GETHASH_RESPONSE_CONTENT,
+ GETHASH_RESPONSE_CONTENT.length
+ );
+ });
+}
+
+function registerHandlerUpdateV4() {
+ // Update handler. Will respond a valid state to be verified in the
+ // gethash handler.
+ gHttpServV4.registerPathHandler(UPDATE_PATH, function (request, response) {
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(
+ UPDATE_RESPONSE_CONTENT,
+ UPDATE_RESPONSE_CONTENT.length
+ );
+
+ waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
+ run_next_test();
+ });
+ });
+}
+
+function run_test() {
+ throwOnUpdateErrors();
+
+ gHttpServV4 = new HttpServer();
+ gHttpServV4.registerDirectory("/", do_get_cwd());
+
+ registerHandlerUpdateV4();
+ gHttpServV4.start(5555);
+ run_next_test();
+}
+
+registerCleanupFunction(function () {
+ stopThrowingOnUpdateErrors();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_listmanager.js b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
new file mode 100644
index 0000000000..751320f161
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_listmanager.js
@@ -0,0 +1,355 @@
+// These tables share the same updateURL.
+const TEST_TABLE_DATA_LIST = [
+ // 0:
+ {
+ tableName: "test-listmanager0-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash0",
+ },
+
+ // 1:
+ {
+ tableName: "test-listmanager1-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash1",
+ },
+
+ // 2.
+ {
+ tableName: "test-listmanager2-digest256",
+ providerName: "google",
+ updateUrl: "http://localhost:4444/safebrowsing/update",
+ gethashUrl: "http://localhost:4444/safebrowsing/gethash2",
+ },
+];
+
+// These tables have a different update URL (for v4).
+const TEST_TABLE_DATA_V4 = {
+ tableName: "test-phish-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+const TEST_TABLE_DATA_V4_DISABLED = {
+ tableName: "test-unwanted-proto",
+ providerName: "google4",
+ updateUrl: "http://localhost:5555/safebrowsing/update?",
+ gethashUrl: "http://localhost:5555/safebrowsing/gethash-v4",
+};
+
+const PREF_NEXTUPDATETIME =
+ "browser.safebrowsing.provider.google.nextupdatetime";
+const PREF_NEXTUPDATETIME_V4 =
+ "browser.safebrowsing.provider.google4.nextupdatetime";
+
+let gListManager = Cc["@mozilla.org/url-classifier/listmanager;1"].getService(
+ Ci.nsIUrlListManager
+);
+
+let gUrlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+);
+
+// Global test server for serving safebrowsing updates.
+let gHttpServ = null;
+let gUpdateResponse = "";
+let gExpectedUpdateRequest = "";
+let gExpectedQueryV4 = "";
+
+// Handles request for TEST_TABLE_DATA_V4.
+let gHttpServV4 = null;
+
+// These two variables are used to synchronize the last two racing updates
+// (in terms of "update URL") in test_update_all_tables().
+let gUpdatedCntForTableData = 0; // For TEST_TABLE_DATA_LIST.
+let gIsV4Updated = false; // For TEST_TABLE_DATA_V4.
+
+const NEW_CLIENT_STATE = "sta\0te";
+const CHECKSUM =
+ "\x30\x67\xc7\x2c\x5e\x50\x1c\x31\xe3\xfe\xca\x73\xf0\x47\xdc\x34\x1a\x95\x63\x99\xec\x70\x5e\x0a\xee\x9e\xfb\x17\xa1\x55\x35\x78";
+
+Services.prefs.setBoolPref("browser.safebrowsing.debug", true);
+
+// The "\xFF\xFF" is to generate a base64 string with "/".
+Services.prefs.setCharPref("browser.safebrowsing.id", "Firefox\xFF\xFF");
+
+// Register tables.
+TEST_TABLE_DATA_LIST.forEach(function (t) {
+ gListManager.registerTable(
+ t.tableName,
+ t.providerName,
+ t.updateUrl,
+ t.gethashUrl
+ );
+});
+
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4.tableName,
+ TEST_TABLE_DATA_V4.providerName,
+ TEST_TABLE_DATA_V4.updateUrl,
+ TEST_TABLE_DATA_V4.gethashUrl
+);
+
+// To test Bug 1302044.
+gListManager.registerTable(
+ TEST_TABLE_DATA_V4_DISABLED.tableName,
+ TEST_TABLE_DATA_V4_DISABLED.providerName,
+ TEST_TABLE_DATA_V4_DISABLED.updateUrl,
+ TEST_TABLE_DATA_V4_DISABLED.gethashUrl
+);
+
+const SERVER_INVOLVED_TEST_CASE_LIST = [
+ // - Do table0 update.
+ // - Server would respond "a:5:32:32\n[DATA]".
+ function test_update_table0() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
+ gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";\n";
+
+ gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
+ gUpdateResponse += readFileToString("data/digest2.chunk");
+
+ forceTableUpdate();
+ },
+
+ // - Do table0 update again. Since chunk 5 was added to table0 in the last
+ // update, the expected request contains "a:5".
+ // - Server would respond "s;2-12\n[DATA]".
+ function test_update_table0_with_existing_chunks() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_LIST[0].tableName);
+ gExpectedUpdateRequest = TEST_TABLE_DATA_LIST[0].tableName + ";a:5\n";
+
+ gUpdateResponse = "n:1000\ni:" + TEST_TABLE_DATA_LIST[0].tableName + "\n";
+ gUpdateResponse += readFileToString("data/digest1.chunk");
+
+ forceTableUpdate();
+ },
+
+ // - Do all-table update.
+ // - Server would respond no chunk control.
+ //
+ // Note that this test MUST be the last one in the array since we rely on
+  // the number of server-involved test cases to synchronize the racing last
+  // two updates for different URLs.
+ function test_update_all_tables() {
+ disableAllUpdates();
+
+ // Enable all tables including TEST_TABLE_DATA_V4!
+ TEST_TABLE_DATA_LIST.forEach(function (t) {
+ gListManager.enableUpdate(t.tableName);
+ });
+
+ // We register two v4 tables but only enable one of them
+ // to verify that the disabled tables are not updated.
+ // See Bug 1302044.
+ gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
+ gListManager.disableUpdate(TEST_TABLE_DATA_V4_DISABLED.tableName);
+
+ // Expected results for v2.
+ gExpectedUpdateRequest =
+ TEST_TABLE_DATA_LIST[0].tableName +
+ ";a:5:s:2-12\n" +
+ TEST_TABLE_DATA_LIST[1].tableName +
+ ";\n" +
+ TEST_TABLE_DATA_LIST[2].tableName +
+ ";\n";
+ gUpdateResponse = "n:1000\n";
+
+ // We test the request against the query string since v4 request
+    // would be appended to the query string. The request is generated
+ // by protobuf API (binary) then encoded to base64 format.
+ let requestV4 = gUrlUtils.makeUpdateRequestV4(
+ [TEST_TABLE_DATA_V4.tableName],
+ [""]
+ );
+ gExpectedQueryV4 = "&$req=" + requestV4;
+
+ forceTableUpdate();
+ },
+];
+
+SERVER_INVOLVED_TEST_CASE_LIST.forEach(t => add_test(t));
+
+add_test(function test_partialUpdateV4() {
+ disableAllUpdates();
+
+ gListManager.enableUpdate(TEST_TABLE_DATA_V4.tableName);
+
+ // Since the new client state has been responded and saved in
+ // test_update_all_tables, this update request should send
+ // a partial update to the server.
+ let requestV4 = gUrlUtils.makeUpdateRequestV4(
+ [TEST_TABLE_DATA_V4.tableName],
+ [btoa(NEW_CLIENT_STATE)]
+ );
+ gExpectedQueryV4 = "&$req=" + requestV4;
+
+ forceTableUpdate();
+});
+
+// Tests nsIUrlListManager.getGethashUrl.
+add_test(function test_getGethashUrl() {
+ TEST_TABLE_DATA_LIST.forEach(function (t) {
+ equal(gListManager.getGethashUrl(t.tableName), t.gethashUrl);
+ });
+ equal(
+ gListManager.getGethashUrl(TEST_TABLE_DATA_V4.tableName),
+ TEST_TABLE_DATA_V4.gethashUrl
+ );
+ run_next_test();
+});
+
+function run_test() {
+ // Setup primary testing server.
+ gHttpServ = new HttpServer();
+ gHttpServ.registerDirectory("/", do_get_cwd());
+
+ gHttpServ.registerPathHandler(
+ "/safebrowsing/update",
+ function (request, response) {
+ let body = NetUtil.readInputStreamToString(
+ request.bodyInputStream,
+ request.bodyInputStream.available()
+ );
+
+ // Verify if the request is as expected.
+ equal(body, gExpectedUpdateRequest);
+
+ // Respond the update which is controlled by the test case.
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.bodyOutputStream.write(gUpdateResponse, gUpdateResponse.length);
+
+ gUpdatedCntForTableData++;
+
+ if (gUpdatedCntForTableData !== SERVER_INVOLVED_TEST_CASE_LIST.length) {
+        // This is not the last test case, so run the next one upon
+        // update success.
+ waitForUpdateSuccess(run_next_test);
+ return;
+ }
+
+ if (gIsV4Updated) {
+ run_next_test(); // All tests are done. Just finish.
+ return;
+ }
+
+ info("Waiting for TEST_TABLE_DATA_V4 to be tested ...");
+ }
+ );
+
+ gHttpServ.start(4444);
+
+ // Setup v4 testing server for the different update URL.
+ gHttpServV4 = new HttpServer();
+ gHttpServV4.registerDirectory("/", do_get_cwd());
+
+ gHttpServV4.registerPathHandler(
+ "/safebrowsing/update",
+ function (request, response) {
+ // V4 update request body should be empty.
+ equal(request.bodyInputStream.available(), 0);
+
+ // Not on the spec. Found in Chromium source code...
+ equal(request.getHeader("X-HTTP-Method-Override"), "POST");
+
+ // V4 update request uses GET.
+ equal(request.method, "GET");
+
+ // V4 append the base64 encoded request to the query string.
+ equal(request.queryString, gExpectedQueryV4);
+ equal(request.queryString.indexOf("+"), -1);
+ equal(request.queryString.indexOf("/"), -1);
+
+ // Respond a V2 compatible content for now. In the future we can
+ // send a meaningful response to test Bug 1284178 to see if the
+ // update is successfully stored to database.
+ response.setHeader(
+ "Content-Type",
+ "application/vnd.google.safebrowsing-update",
+ false
+ );
+ response.setStatusLine(request.httpVersion, 200, "OK");
+
+      // The protobuf binary representation of the response:
+ //
+ // [
+ // {
+ // 'threat_type': 2, // SOCIAL_ENGINEERING_PUBLIC
+ // 'response_type': 2, // FULL_UPDATE
+ // 'new_client_state': 'sta\x00te', // NEW_CLIENT_STATE
+ // 'checksum': { "sha256": CHECKSUM }, // CHECKSUM
+ // 'additions': { 'compression_type': RAW,
+ // 'prefix_size': 4,
+ // 'raw_hashes': "00000001000000020000000300000004"}
+ // }
+ // ]
+ //
+ let content =
+ "\x0A\x4A\x08\x02\x20\x02\x2A\x18\x08\x01\x12\x14\x08\x04\x12\x10\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x3A\x06\x73\x74\x61\x00\x74\x65\x42\x22\x0A\x20\x30\x67\xC7\x2C\x5E\x50\x1C\x31\xE3\xFE\xCA\x73\xF0\x47\xDC\x34\x1A\x95\x63\x99\xEC\x70\x5E\x0A\xEE\x9E\xFB\x17\xA1\x55\x35\x78\x12\x08\x08\x08\x10\x80\x94\xEB\xDC\x03";
+
+ response.bodyOutputStream.write(content, content.length);
+
+ if (gIsV4Updated) {
+ // This falls to the case where test_partialUpdateV4 is running.
+ // We are supposed to have verified the update request contains
+ // the state we set in the previous request.
+ waitForUpdateSuccess(run_next_test);
+ return;
+ }
+
+ waitUntilMetaDataSaved(NEW_CLIENT_STATE, CHECKSUM, () => {
+ gIsV4Updated = true;
+
+ if (gUpdatedCntForTableData === SERVER_INVOLVED_TEST_CASE_LIST.length) {
+ // All tests are done!
+ run_next_test();
+ return;
+ }
+
+ info("Wait for all sever-involved tests to be done ...");
+ });
+ }
+ );
+
+ gHttpServV4.start(5555);
+
+ registerCleanupFunction(function () {
+ return (async function () {
+ await Promise.all([gHttpServ.stop(), gHttpServV4.stop()]);
+ })();
+ });
+
+ run_next_test();
+}
+
+// A trick to force updating tables. However, before calling this, we have to
+// call disableAllUpdates() first to clean up the updateCheckers in listmanager.
+function forceTableUpdate() {
+ throwOnUpdateErrors();
+ Services.prefs.setCharPref(PREF_NEXTUPDATETIME, "1");
+ Services.prefs.setCharPref(PREF_NEXTUPDATETIME_V4, "1");
+ gListManager.maybeToggleUpdateChecking();
+}
+
+function disableAllUpdates() {
+ stopThrowingOnUpdateErrors();
+ TEST_TABLE_DATA_LIST.forEach(t => gListManager.disableUpdate(t.tableName));
+ gListManager.disableUpdate(TEST_TABLE_DATA_V4.tableName);
+}
+
+function waitForUpdateSuccess(callback) {
+ Services.obs.addObserver(function listener() {
+ Services.obs.removeObserver(listener, "safebrowsing-update-finished");
+ callback();
+ }, "safebrowsing-update-finished");
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js b/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js
new file mode 100644
index 0000000000..c7118f6e49
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_malwaretable_pref.js
@@ -0,0 +1,4 @@
+// Ensure that the default value of malwareTable is always in sorted order
+let originalValue = Services.prefs.getCharPref("urlclassifier.malwareTable");
+let sortedValue = originalValue.split(",").sort().join(",");
+Assert.equal(originalValue, sortedValue);
diff --git a/toolkit/components/url-classifier/tests/unit/test_partial.js b/toolkit/components/url-classifier/tests/unit/test_partial.js
new file mode 100644
index 0000000000..1220665063
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_partial.js
@@ -0,0 +1,607 @@
+/**
+ * DummyCompleter() lets tests easily specify the results of a partial
+ * hash completion request.
+ */
+function DummyCompleter() {
+ this.fragments = {};
+ this.queries = [];
+ this.tableName = "test-phish-simple";
+}
+
+DummyCompleter.prototype = {
+ QueryInterface: ChromeUtils.generateQI(["nsIUrlClassifierHashCompleter"]),
+
+ complete(partialHash, gethashUrl, tableName, cb) {
+ this.queries.push(partialHash);
+ var fragments = this.fragments;
+ var self = this;
+ var doCallback = function () {
+ if (self.alwaysFail) {
+ cb.completionFinished(Cr.NS_ERROR_FAILURE);
+ return;
+ }
+ if (fragments[partialHash]) {
+ for (var i = 0; i < fragments[partialHash].length; i++) {
+ var chunkId = fragments[partialHash][i][0];
+ var hash = fragments[partialHash][i][1];
+ cb.completionV2(hash, self.tableName, chunkId);
+ }
+ }
+ cb.completionFinished(0);
+ };
+ executeSoon(doCallback);
+ },
+
+ getHash(fragment) {
+ var data = new TextEncoder().encode(fragment);
+ var ch = Cc["@mozilla.org/security/hash;1"].createInstance(
+ Ci.nsICryptoHash
+ );
+ ch.init(ch.SHA256);
+ ch.update(data, data.length);
+ var hash = ch.finish(false);
+ return hash.slice(0, 32);
+ },
+
+ addFragment(chunkId, fragment) {
+ this.addHash(chunkId, this.getHash(fragment));
+ },
+
+ // This method allows the caller to generate complete hashes that match the
+ // prefix of a real fragment, but have different complete hashes.
+ addConflict(chunkId, fragment) {
+ var realHash = this.getHash(fragment);
+ var invalidHash = this.getHash("blah blah blah blah blah");
+ this.addHash(chunkId, realHash.slice(0, 4) + invalidHash.slice(4, 32));
+ },
+
+ addHash(chunkId, hash) {
+ var partial = hash.slice(0, 4);
+ if (this.fragments[partial]) {
+ this.fragments[partial].push([chunkId, hash]);
+ } else {
+ this.fragments[partial] = [[chunkId, hash]];
+ }
+ },
+
+ compareQueries(fragments) {
+ var expectedQueries = [];
+ for (let i = 0; i < fragments.length; i++) {
+ expectedQueries.push(this.getHash(fragments[i]).slice(0, 4));
+ }
+ Assert.equal(this.queries.length, expectedQueries.length);
+ expectedQueries.sort();
+ this.queries.sort();
+ for (let i = 0; i < this.queries.length; i++) {
+ Assert.equal(this.queries[i], expectedQueries[i]);
+ }
+ },
+};
+
+function setupCompleter(table, hits, conflicts) {
+ var completer = new DummyCompleter();
+ completer.tableName = table;
+ for (let i = 0; i < hits.length; i++) {
+ let chunkId = hits[i][0];
+ let fragments = hits[i][1];
+ for (let j = 0; j < fragments.length; j++) {
+ completer.addFragment(chunkId, fragments[j]);
+ }
+ }
+ for (let i = 0; i < conflicts.length; i++) {
+ let chunkId = conflicts[i][0];
+ let fragments = conflicts[i][1];
+ for (let j = 0; j < fragments.length; j++) {
+ completer.addConflict(chunkId, fragments[j]);
+ }
+ }
+
+ dbservice.setHashCompleter(table, completer);
+
+ return completer;
+}
+
+function installCompleter(table, fragments, conflictFragments) {
+ return setupCompleter(table, fragments, conflictFragments);
+}
+
+function installFailingCompleter(table) {
+ var completer = setupCompleter(table, [], []);
+ completer.alwaysFail = true;
+ return completer;
+}
+
+// Helper assertion for checking dummy completer queries
+gAssertions.completerQueried = function (data, cb) {
+ var completer = data[0];
+ completer.compareQueries(data[1]);
+ cb();
+};
+
+function doTest(updates, assertions) {
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+}
+
+// Test an add of two partial urls to a fresh database
+function testPartialAdds() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+function testPartialAddsWithConflicts() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ // Each result will have both a real match and a conflict
+ var completer = installCompleter(
+ "test-phish-simple",
+ [[1, addUrls]],
+ [[1, addUrls]]
+ );
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+// Test whether the fragmenting code does not cause duplicated completions
+function testFragments() {
+ var addUrls = ["foo.com/a/b/c", "foo.net/", "foo.com/c/"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
+// section 6.2 example 1
+function testSpecFragments() {
+ var probeUrls = ["a.b.c/1/2.html?param=1"];
+
+ var addUrls = [
+ "a.b.c/1/2.html",
+ "a.b.c/",
+ "a.b.c/1/",
+ "b.c/1/2.html?param=1",
+ "b.c/1/2.html",
+ "b.c/",
+ "b.c/1/",
+ "a.b.c/1/2.html?param=1",
+ ];
+
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: probeUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+// Test http://code.google.com/p/google-safe-browsing/wiki/Protocolv2Spec
+// section 6.2 example 2
+function testMoreSpecFragments() {
+ var probeUrls = ["a.b.c.d.e.f.g/1.html"];
+
+ var addUrls = [
+ "a.b.c.d.e.f.g/1.html",
+ "a.b.c.d.e.f.g/",
+ "c.d.e.f.g/1.html",
+ "c.d.e.f.g/",
+ "d.e.f.g/1.html",
+ "d.e.f.g/",
+ "e.f.g/1.html",
+ "e.f.g/",
+ "f.g/1.html",
+ "f.g/",
+ ];
+
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: probeUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+function testFalsePositives() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ // Each result will have no matching complete hashes and a non-matching
+ // conflict
+ var completer = installCompleter("test-phish-simple", [], [[1, addUrls]]);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsDontExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+function testEmptyCompleter() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ // Completer will never return full hashes
+ var completer = installCompleter("test-phish-simple", [], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsDontExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+function testCompleterFailure() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+ // Completer will never return full hashes
+ var completer = installFailingCompleter("test-phish-simple");
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsDontExist: addUrls,
+ completerQueried: [completer, addUrls],
+ };
+
+ doTest([update], assertions);
+}
+
+function testMixedSizesSameDomain() {
+ var add1Urls = ["foo.com/a"];
+ var add2Urls = ["foo.com/b"];
+
+ var update1 = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }], 4);
+ var update2 = buildPhishingUpdate([{ chunkNum: 2, urls: add2Urls }], 32);
+
+ // We should only need to complete the partial hashes
+ var completer = installCompleter("test-phish-simple", [[1, add1Urls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2",
+ // both urls should match...
+ urlsExist: add1Urls.concat(add2Urls),
+ // ... but the completer should only be queried for the partial entry
+ completerQueried: [completer, add1Urls],
+ };
+
+ doTest([update1, update2], assertions);
+}
+
+function testMixedSizesDifferentDomains() {
+ var add1Urls = ["foo.com/a"];
+ var add2Urls = ["bar.com/b"];
+
+ var update1 = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }], 4);
+ var update2 = buildPhishingUpdate([{ chunkNum: 2, urls: add2Urls }], 32);
+
+ // We should only need to complete the partial hashes
+ var completer = installCompleter("test-phish-simple", [[1, add1Urls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-2",
+ // both urls should match...
+ urlsExist: add1Urls.concat(add2Urls),
+ // ... but the completer should only be queried for the partial entry
+ completerQueried: [completer, add1Urls],
+ };
+
+ doTest([update1, update2], assertions);
+}
+
+function testInvalidHashSize() {
+ var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 12); // only 4 and 32 are legal hash sizes
+
+ var addUrls2 = ["zaz.com/a", "xyz.com/b"];
+ var update2 = buildPhishingUpdate([{ chunkNum: 2, urls: addUrls2 }], 4);
+
+ installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+ var assertions = {
+ tableData: "test-phish-simple;a:2",
+ urlsDontExist: addUrls,
+ };
+
+ // A successful update will trigger an error
+ doUpdateTest([update2, update], assertions, updateError, runNextTest);
+}
+
+// Verify that a completion answered for the wrong table is discarded: the
+// prefixes belong to test-phish-simple but the completer claims they are
+// malware, so the lookup must miss, and the cached miss must suppress any
+// further completer queries.
+function testWrongTable() {
+  var addUrls = ["foo.com/a"];
+  var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+  var completer = installCompleter(
+    "test-malware-simple", // wrong table
+    [[1, addUrls]],
+    []
+  );
+
+  // The above installCompleter installs the completer for test-malware-simple,
+  // we want it to be used for test-phish-simple too.
+  dbservice.setHashCompleter("test-phish-simple", completer);
+
+  var assertions = {
+    tableData: "test-phish-simple;a:1",
+    // The urls were added as phishing urls, but the completer is claiming
+    // that they are malware urls, and we trust the completer in this case.
+    // The result will be discarded, so we can only check for non-existence.
+    urlsDontExist: addUrls,
+    // Make sure the completer was actually queried.
+    completerQueried: [completer, addUrls],
+  };
+
+  doUpdateTest(
+    [update],
+    assertions,
+    function () {
+      // Give the dbservice a chance to (not) cache the result.
+      do_timeout(3000, function () {
+        // The miss earlier will have caused a miss to be cached.
+        // Resetting the completer does not count as an update,
+        // so we will not be probed again.
+        var newCompleter = installCompleter(
+          "test-malware-simple",
+          [[1, addUrls]],
+          []
+        );
+        dbservice.setHashCompleter("test-phish-simple", newCompleter);
+
+        var assertions1 = {
+          urlsDontExist: addUrls,
+        };
+        checkAssertions(assertions1, runNextTest);
+      });
+    },
+    updateError
+  );
+}
+
+// Shared setup for the cached-result tests: add |addUrls| via an update,
+// install a completer for them, and look them up once so the completions
+// get cached. Invokes |part2| with no arguments after giving the dbservice
+// time (3s) to cache the results.
+function setupCachedResults(addUrls, part2) {
+  var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+
+  var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+  var assertions = {
+    tableData: "test-phish-simple;a:1",
+    // Request the add url. This should cause the completion to be cached.
+    urlsExist: addUrls,
+    // Make sure the completer was actually queried.
+    completerQueried: [completer, addUrls],
+  };
+
+  doUpdateTest(
+    [update],
+    assertions,
+    function () {
+      // Give the dbservice a chance to cache the result.
+      do_timeout(3000, part2);
+    },
+    updateError
+  );
+}
+
+function testCachedResults() {
+ setupCachedResults(["foo.com/a"], function (add) {
+ // This is called after setupCachedResults(). Verify that
+ // checking the url again does not cause a completer request.
+
+ // install a new completer, this one should never be queried.
+ var newCompleter = installCompleter("test-phish-simple", [[1, []]], []);
+
+ var assertions = {
+ urlsExist: ["foo.com/a"],
+ completerQueried: [newCompleter, []],
+ };
+ checkAssertions(assertions, runNextTest);
+ });
+}
+
+// Verify that a sub chunk removes a URL even when its completion is cached:
+// after the sub update the URL must not exist and the completer must not be
+// queried.
+function testCachedResultsWithSub() {
+  setupCachedResults(["foo.com/a"], function () {
+    // install a new completer, this one should never be queried.
+    var newCompleter = installCompleter("test-phish-simple", [[1, []]], []);
+
+    var removeUpdate = buildPhishingUpdate(
+      [{ chunkNum: 2, chunkType: "s", urls: ["1:foo.com/a"] }],
+      4
+    );
+
+    var assertions = {
+      urlsDontExist: ["foo.com/a"],
+      completerQueried: [newCompleter, []],
+    };
+
+    doTest([removeUpdate], assertions);
+  });
+}
+
+// Verify that expiring the add chunk (ad:1) also drops the cached
+// completion: the URL must not exist and the completer must not be queried.
+function testCachedResultsWithExpire() {
+  setupCachedResults(["foo.com/a"], function () {
+    // install a new completer, this one should never be queried.
+    var newCompleter = installCompleter("test-phish-simple", [[1, []]], []);
+
+    // Raw update string: expire add chunk 1 of test-phish-simple.
+    var expireUpdate = "n:1000\ni:test-phish-simple\nad:1\n";
+
+    var assertions = {
+      urlsDontExist: ["foo.com/a"],
+      completerQueried: [newCompleter, []],
+    };
+    doTest([expireUpdate], assertions);
+  });
+}
+
+// Verify that a failed update flushes cached completions: the cached hit is
+// first served without querying the completer, then after doErrorUpdate()
+// a fresh completer must be queried again.
+function testCachedResultsFailure() {
+  var existUrls = ["foo.com/a"];
+  setupCachedResults(existUrls, function () {
+    // This is called after setupCachedResults(). Verify that
+    // checking the url again does not cause a completer request.
+
+    // install a new completer, this one should never be queried.
+    var newCompleter = installCompleter("test-phish-simple", [[1, []]], []);
+
+    var assertions = {
+      urlsExist: existUrls,
+      completerQueried: [newCompleter, []],
+    };
+
+    checkAssertions(assertions, function () {
+      // Apply the update. The cached completes should be gone.
+      doErrorUpdate(
+        "test-phish-simple,test-malware-simple",
+        function () {
+          // Now the completer gets queried again.
+          var newCompleter2 = installCompleter(
+            "test-phish-simple",
+            [[1, existUrls]],
+            []
+          );
+          var assertions2 = {
+            tableData: "test-phish-simple;a:1",
+            urlsExist: existUrls,
+            completerQueried: [newCompleter2, existUrls],
+          };
+          checkAssertions(assertions2, runNextTest);
+        },
+        updateError
+      );
+    });
+  });
+}
+
+// Verify that a failed update marks the tables stale, which forces complete
+// URLs to be re-verified through the completer.
+function testErrorList() {
+  var addUrls = ["foo.com/a", "foo.com/b", "bar.com/c"];
+  var update = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls }], 4);
+  // The update failure will kill the completes, so the above
+  // must be prefixes to get any hit at all past the update failure.
+
+  var completer = installCompleter("test-phish-simple", [[1, addUrls]], []);
+
+  var assertions = {
+    tableData: "test-phish-simple;a:1",
+    urlsExist: addUrls,
+    // These are complete urls, and will only be completed if the
+    // list is stale.
+    completerQueried: [completer, addUrls],
+  };
+
+  // Apply the update.
+  doStreamUpdate(
+    update,
+    function () {
+      // Now the test-phish-simple and test-malware-simple tables are marked
+      // as fresh. Fake an update failure to mark them stale.
+      doErrorUpdate(
+        "test-phish-simple,test-malware-simple",
+        function () {
+          // Now the lists should be marked stale. Check assertions.
+          checkAssertions(assertions, runNextTest);
+        },
+        updateError
+      );
+    },
+    updateError
+  );
+}
+
+// Verify that different lists (test-phish-simple,
+// test-malware-simple) maintain their freshness separately.
+// Verify that different lists (test-phish-simple, test-malware-simple)
+// maintain their freshness independently: failing only the phishing update
+// must leave the malware list fresh (no completer query for malware urls).
+function testErrorListIndependent() {
+  var phishUrls = ["phish.com/a"];
+  var malwareUrls = ["attack.com/a"];
+  var update = buildPhishingUpdate([{ chunkNum: 1, urls: phishUrls }], 4);
+  // These have to persist past the update failure, so they must be prefixes,
+  // not completes.
+
+  update += buildMalwareUpdate([{ chunkNum: 2, urls: malwareUrls }], 32);
+
+  var completer = installCompleter("test-phish-simple", [[1, phishUrls]], []);
+
+  var assertions = {
+    tableData: "test-malware-simple;a:2\ntest-phish-simple;a:1",
+    urlsExist: phishUrls,
+    malwareUrlsExist: malwareUrls,
+    // Only the phishing urls should be completed, because only the phishing
+    // urls will be stale.
+    completerQueried: [completer, phishUrls],
+  };
+
+  // Apply the update.
+  doStreamUpdate(
+    update,
+    function () {
+      // Now the test-phish-simple and test-malware-simple tables are
+      // marked as fresh. Fake an update failure to mark *just*
+      // phishing data as stale.
+      doErrorUpdate(
+        "test-phish-simple",
+        function () {
+          // Now the lists should be marked stale. Check assertions.
+          checkAssertions(assertions, runNextTest);
+        },
+        updateError
+      );
+    },
+    updateError
+  );
+}
+
+// Entry point: run the partial-update / completion tests in sequence.
+// Order matters; later tests assume earlier ones have run via runNextTest.
+function run_test() {
+  runTests([
+    testPartialAdds,
+    testPartialAddsWithConflicts,
+    testFragments,
+    testSpecFragments,
+    testMoreSpecFragments,
+    testFalsePositives,
+    testEmptyCompleter,
+    testCompleterFailure,
+    testMixedSizesSameDomain,
+    testMixedSizesDifferentDomains,
+    testInvalidHashSize,
+    testWrongTable,
+    testCachedResults,
+    testCachedResultsWithSub,
+    testCachedResultsWithExpire,
+    testCachedResultsFailure,
+    testErrorList,
+    testErrorListIndependent,
+  ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js b/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
new file mode 100644
index 0000000000..499c9e478c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_platform_specific_threats.js
@@ -0,0 +1,104 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+
+const { AppConstants } = ChromeUtils.importESModule(
+ "resource://gre/modules/AppConstants.sys.mjs"
+);
+
+// nsIUrlClassifierUtils service used to build update / fullhash requests.
+let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+  Ci.nsIUrlClassifierUtils
+);
+
+// Mobile-only threat tables (goog-harmful-proto) must be included in update
+// and fullhash requests on Android and filtered out on every other platform.
+function testMobileOnlyThreats() {
+  // Mobile-only threat type(s):
+  // - goog-harmful-proto (POTENTIALLY_HARMFUL_APPLICATION)
+
+  (function testUpdateRequest() {
+    let requestWithPHA = urlUtils.makeUpdateRequestV4(
+      ["goog-phish-proto", "goog-harmful-proto"],
+      ["AAAAAA", "AAAAAA"]
+    );
+
+    let requestNoPHA = urlUtils.makeUpdateRequestV4(
+      ["goog-phish-proto"],
+      ["AAAAAA"]
+    );
+
+    if (AppConstants.platform === "android") {
+      notEqual(
+        requestWithPHA,
+        requestNoPHA,
+        "PHA (i.e. goog-harmful-proto) shouldn't be filtered on mobile platform."
+      );
+    } else {
+      equal(
+        requestWithPHA,
+        requestNoPHA,
+        "PHA (i.e. goog-harmful-proto) should be filtered on non-mobile platform."
+      );
+    }
+  })();
+
+  (function testFullHashRequest() {
+    let requestWithPHA = urlUtils.makeFindFullHashRequestV4(
+      ["goog-phish-proto", "goog-harmful-proto"],
+      ["", ""], // state.
+      [btoa("0123")] // prefix.
+    );
+
+    let requestNoPHA = urlUtils.makeFindFullHashRequestV4(
+      ["goog-phish-proto"],
+      [""], // state.
+      [btoa("0123")] // prefix.
+    );
+
+    if (AppConstants.platform === "android") {
+      notEqual(
+        requestWithPHA,
+        requestNoPHA,
+        "PHA (i.e. goog-harmful-proto) shouldn't be filtered on mobile platform."
+      );
+    } else {
+      equal(
+        requestWithPHA,
+        requestNoPHA,
+        "PHA (i.e. goog-harmful-proto) should be filtered on non-mobile platform."
+      );
+    }
+  })();
+}
+
+// Desktop-only threat tables must be included in update requests on desktop
+// and filtered out on Android.
+function testDesktopOnlyThreats() {
+  // Desktop-only threats:
+  // - goog-downloadwhite-proto (CSD_WHITELIST)
+  // - goog-badbinurl-proto (MALICIOUS_BINARY)
+
+  let requestWithDesktopOnlyThreats = urlUtils.makeUpdateRequestV4(
+    ["goog-phish-proto", "goog-downloadwhite-proto", "goog-badbinurl-proto"],
+    ["", "", ""]
+  );
+
+  let requestNoDesktopOnlyThreats = urlUtils.makeUpdateRequestV4(
+    ["goog-phish-proto"],
+    [""]
+  );
+
+  if (AppConstants.platform === "android") {
+    equal(
+      requestWithDesktopOnlyThreats,
+      requestNoDesktopOnlyThreats,
+      "Android shouldn't contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'."
+    );
+  } else {
+    notEqual(
+      requestWithDesktopOnlyThreats,
+      requestNoDesktopOnlyThreats,
+      "Desktop should contain 'goog-downloadwhite-proto' and 'goog-badbinurl-proto'."
+    );
+  }
+}
+
+// Entry point.
+function run_test() {
+  testMobileOnlyThreats();
+  testDesktopOnlyThreats();
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_pref.js b/toolkit/components/url-classifier/tests/unit/test_pref.js
new file mode 100644
index 0000000000..3a72eceb8e
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_pref.js
@@ -0,0 +1,15 @@
+// Check the safebrowsing protocol version reported for each provider.
+function run_test() {
+  let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+    Ci.nsIUrlClassifierUtils
+  );
+
+  // The google protocol version should be "2.2" until we enable SB v4
+  // by default.
+  equal(urlUtils.getProtocolVersion("google"), "2.2");
+
+  // Mozilla protocol version will stick to "2.2".
+  equal(urlUtils.getProtocolVersion("mozilla"), "2.2");
+
+  // Unknown provider version will be "2.2".
+  equal(urlUtils.getProtocolVersion("unknown-provider"), "2.2");
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_prefixset.js b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
new file mode 100644
index 0000000000..b45d4b6771
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_prefixset.js
@@ -0,0 +1,178 @@
+// newPset: returns an empty nsIUrlClassifierPrefixSet.
+function newPset() {
+ let pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
+ Ci.nsIUrlClassifierPrefixSet
+ );
+ pset.init("all");
+ return pset;
+}
+
+// arrContains: returns true if |arr| contains the element |target|. Uses binary
+// search and requires |arr| to be sorted.
+function arrContains(arr, target) {
+ let start = 0;
+ let end = arr.length - 1;
+ let i = 0;
+
+ while (end > start) {
+ i = start + ((end - start) >> 1);
+ let value = arr[i];
+
+ if (value < target) {
+ start = i + 1;
+ } else if (value > target) {
+ end = i - 1;
+ } else {
+ break;
+ }
+ }
+ if (start == end) {
+ i = start;
+ }
+
+ return !(i < 0 || i >= arr.length) && arr[i] == target;
+}
+
+// checkContents: Check whether the PrefixSet pset contains
+// the prefixes in the passed array.
+function checkContents(pset, prefixes) {
+ var outcount = {},
+ outset = {};
+ outset = pset.getPrefixes(outcount);
+ let inset = prefixes;
+ Assert.equal(inset.length, outset.length);
+ inset.sort((x, y) => x - y);
+ for (let i = 0; i < inset.length; i++) {
+ Assert.equal(inset[i], outset[i]);
+ }
+}
+
+// wrappedProbe: thin wrapper around nsIUrlClassifierPrefixSet.contains().
+function wrappedProbe(pset, prefix) {
+  return pset.contains(prefix);
+}
+
+// doRandomLookups: we use this to test for false membership with random input
+// over the range of prefixes (unsigned 32-bits integers).
+// pset: a nsIUrlClassifierPrefixSet to test.
+// prefixes: an array of prefixes supposed to make up the prefix set.
+// N: number of random lookups to make.
+function doRandomLookups(pset, prefixes, N) {
+  for (let i = 0; i < N; i++) {
+    // Start from a known member so the loop below always draws at least one
+    // random candidate that is outside the set before probing.
+    let randInt = prefixes[0];
+    while (arrContains(prefixes, randInt)) {
+      randInt = Math.floor(Math.random() * Math.pow(2, 32));
+    }
+
+    Assert.ok(!wrappedProbe(pset, randInt));
+  }
+}
+
+// doExpectedLookups: we use this to test expected membership.
+// pset: a nsIUrlClassifierPrefixSet to test.
+// prefixes:
+// doExpectedLookups: we use this to test expected membership.
+// pset: a nsIUrlClassifierPrefixSet to test.
+// prefixes: an array of prefixes that must all be contained in the set.
+// N: number of times to repeat the whole membership check.
+function doExpectedLookups(pset, prefixes, N) {
+  for (let i = 0; i < N; i++) {
+    prefixes.forEach(function (x) {
+      dump("Checking " + x + "\n");
+      Assert.ok(wrappedProbe(pset, x));
+    });
+  }
+}
+
+// testBasicPset: A very basic test of the prefix set to make sure that it
+// exists and to give a basic example of its use.
+// testBasicPset: A very basic test of the prefix set to make sure that it
+// exists and to give a basic example of its use.
+function testBasicPset() {
+  let pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
+    Ci.nsIUrlClassifierPrefixSet
+  );
+  let prefixes = [2, 50, 100, 2000, 78000, 1593203];
+  pset.setPrefixes(prefixes, prefixes.length);
+
+  // Probe members and non-members at both ends of the range.
+  Assert.ok(wrappedProbe(pset, 100));
+  Assert.ok(!wrappedProbe(pset, 100000));
+  Assert.ok(wrappedProbe(pset, 1593203));
+  Assert.ok(!wrappedProbe(pset, 999));
+  Assert.ok(!wrappedProbe(pset, 0));
+
+  checkContents(pset, prefixes);
+}
+
+// Verify that a prefix array containing duplicates still answers membership
+// queries correctly.
+function testDuplicates() {
+  let pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
+    Ci.nsIUrlClassifierPrefixSet
+  );
+  let prefixes = [1, 1, 2, 2, 2, 3, 3, 3, 3, 3, 3, 5, 6, 6, 7, 7, 9, 9, 9];
+  pset.setPrefixes(prefixes, prefixes.length);
+
+  Assert.ok(wrappedProbe(pset, 1));
+  Assert.ok(wrappedProbe(pset, 2));
+  Assert.ok(wrappedProbe(pset, 5));
+  Assert.ok(wrappedProbe(pset, 9));
+  Assert.ok(!wrappedProbe(pset, 4));
+  Assert.ok(!wrappedProbe(pset, 8));
+
+  checkContents(pset, prefixes);
+}
+
+// Exercise a small prefix set with both random (negative) and expected
+// (positive) lookups.
+function testSimplePset() {
+  let pset = newPset();
+  let prefixes = [1, 2, 100, 400, 123456789];
+  pset.setPrefixes(prefixes, prefixes.length);
+
+  doRandomLookups(pset, prefixes, 100);
+  doExpectedLookups(pset, prefixes, 1);
+
+  checkContents(pset, prefixes);
+}
+
+// Verify that setPrefixes() replaces the previous contents entirely: after
+// the second call, none of the original prefixes may remain.
+function testReSetPrefixes() {
+  let pset = newPset();
+  let prefixes = [1, 5, 100, 1000, 150000];
+  pset.setPrefixes(prefixes, prefixes.length);
+
+  doExpectedLookups(pset, prefixes, 1);
+
+  let secondPrefixes = [12, 50, 300, 2000, 5000, 200000];
+  pset.setPrefixes(secondPrefixes, secondPrefixes.length);
+
+  doExpectedLookups(pset, secondPrefixes, 1);
+  for (let i = 0; i < prefixes.length; i++) {
+    Assert.ok(!wrappedProbe(pset, prefixes[i]));
+  }
+
+  checkContents(pset, secondPrefixes);
+}
+
+// Boundary cases: a one-element set, then an empty set.
+function testTinySet() {
+  let pset = Cc["@mozilla.org/url-classifier/prefixset;1"].createInstance(
+    Ci.nsIUrlClassifierPrefixSet
+  );
+  let prefixes = [1];
+  pset.setPrefixes(prefixes, prefixes.length);
+
+  Assert.ok(wrappedProbe(pset, 1));
+  Assert.ok(!wrappedProbe(pset, 100000));
+  checkContents(pset, prefixes);
+
+  // Clearing with an empty array must make prior members disappear.
+  prefixes = [];
+  pset.setPrefixes(prefixes, prefixes.length);
+  Assert.ok(!wrappedProbe(pset, 1));
+  checkContents(pset, prefixes);
+}
+
+// Tests are run synchronously, in this order, by run_test().
+var tests = [
+  testBasicPset,
+  testSimplePset,
+  testReSetPrefixes,
+  testDuplicates,
+  testTinySet,
+];
+
+function run_test() {
+ // None of the tests use |executeSoon| or any sort of callbacks, so we can
+ // just run them in succession.
+ for (let i = 0; i < tests.length; i++) {
+ dump("Running " + tests[i].name + "\n");
+ tests[i]();
+ }
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_provider_url.js b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
new file mode 100644
index 0000000000..8229448a9c
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_provider_url.js
@@ -0,0 +1,32 @@
+const { updateAppInfo } = ChromeUtils.importESModule(
+ "resource://testing-common/AppInfo.sys.mjs"
+);
+
+// Update the test app info with the given version string.
+function updateVersion(version) {
+  updateAppInfo({ version });
+}
+
+// Check that each safebrowsing provider URL pref expands &appver= to the
+// current (major.minor) app version.
+add_test(function test_provider_url() {
+  let urls = [
+    "browser.safebrowsing.provider.google.updateURL",
+    "browser.safebrowsing.provider.google.gethashURL",
+    "browser.safebrowsing.provider.mozilla.updateURL",
+    "browser.safebrowsing.provider.mozilla.gethashURL",
+  ];
+
+  // FIXME: Most of these only worked in the past because calling
+  // `updateAppInfo` did not actually replace `Services.appinfo`, which
+  // the URL formatter uses.
+  // let versions = ["49.0", "49.0.1", "49.0a1", "49.0b1", "49.0esr", "49.0.1esr"];
+  let versions = ["49.0", "49.0.1"];
+
+  for (let version of versions) {
+    for (let url of urls) {
+      updateVersion(version);
+      let value = Services.urlFormatter.formatURLPref(url);
+      Assert.notEqual(value.indexOf("&appver=49.0&"), -1);
+    }
+  }
+
+  run_next_test();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_rsListService.js b/toolkit/components/url-classifier/tests/unit/test_rsListService.js
new file mode 100644
index 0000000000..cd70f92885
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_rsListService.js
@@ -0,0 +1,424 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+"use strict";
+
+/* Unit tests for the nsIUrlClassifierRemoteSettingsService implementation. */
+
+const { RemoteSettings } = ChromeUtils.importESModule(
+ "resource://services-settings/remote-settings.sys.mjs"
+);
+const { SBRS_UPDATE_MINIMUM_DELAY } = ChromeUtils.importESModule(
+ "resource://gre/modules/UrlClassifierRemoteSettingsService.sys.mjs"
+);
+
+const COLLECTION_NAME = "tracking-protection-lists";
+
+// Fixture records for the tracking-protection-lists collection. The first
+// three entries have valid attachments served from data/; the last two
+// exercise the missing-attachment and corrupted-attachment error paths.
+const REMOTE_SETTINGS_DATA = [
+  {
+    Name: "content-fingerprinting-track-digest256",
+    attachment: {
+      hash: "96a4a850a1a475001148fa8a3a5efea58951f7176d3624ad7614fbf32732ee48",
+      size: 948,
+      filename: "content-fingerprinting-track-digest256",
+      location:
+        "main-workspace/tracking-protection-lists/content-fingerprinting-track-digest256",
+      mimetype: "text/plain",
+    },
+    id: "content-fingerprinting-track-digest256",
+    Version: 1597417364,
+  },
+  {
+    Name: "mozplugin-block-digest256",
+    attachment: {
+      hash: "dd2b800c7e4bad17e1c79f3e530c0b94e0a039adf4566f30bc3c285a547fa4fc",
+      size: 3029,
+      filename: "mozplugin-block-digest256",
+      location:
+        "main-workspace/tracking-protection-lists/mozplugin-block-digest256",
+      mimetype: "text/plain",
+    },
+    id: "mozplugin-block-digest256",
+    Version: 1575583456,
+  },
+  {
+    Name: "google-trackwhite-digest256",
+    attachment: {
+      hash: "1cd6d9353e97d66ac737a9716cd3a33416d6a4884dd12dcd1d65266e4c81dfad",
+      size: 1470328,
+      filename: "google-trackwhite-digest256",
+      location:
+        "main-workspace/tracking-protection-lists/google-trackwhite-digest256",
+      mimetype: "text/plain",
+    },
+    id: "google-trackwhite-digest256",
+    Version: 1575583456,
+  },
+  // Entry with non-existent attachment (download must fail).
+  {
+    Name: "social-track-digest256",
+    attachment: {
+      location: "main-workspace/tracking-protection-lists/not-exist",
+    },
+    id: "social-track-digest256",
+    Version: 1111111111,
+  },
+  // Entry with corrupted attachment (update must fail to apply).
+  {
+    Name: "analytic-track-digest256",
+    attachment: {
+      hash: "644a0662bcf7313570ee68490e3805f5cc7a0503c097f040525c28dc5bfe4c97",
+      size: 58,
+      filename: "invalid.chunk",
+      location: "main-workspace/tracking-protection-lists/invalid.chunk",
+      mimetype: "text/plain",
+    },
+    id: "analytic-track-digest256",
+    Version: 1111111111,
+  },
+];
+
+// Service under test, plus the DB service used to verify list lookups.
+let gListService = Cc["@mozilla.org/url-classifier/list-service;1"].getService(
+  Ci.nsIUrlClassifierRemoteSettingsService
+);
+let gDbService = Cc["@mozilla.org/url-classifier/dbservice;1"].getService(
+  Ci.nsIUrlClassifierDBService
+);
+
+class UpdateEvent extends EventTarget {}
+function waitForEvent(element, eventName) {
+ return new Promise(function (resolve) {
+ element.addEventListener(eventName, e => resolve(e.detail), { once: true });
+ });
+}
+
+function buildPayload(tables) {
+ let payload = ``;
+ for (let table of tables) {
+ payload += table[0];
+ if (table[1] != null) {
+ payload += `;a:${table[1]}`;
+ }
+ payload += `\n`;
+ }
+ return payload;
+}
+
+let server;
+// Configure prefs, stand up a local HTTP server that plays both the
+// remote-settings API and attachment CDN, and seed the collection with
+// REMOTE_SETTINGS_DATA.
+add_setup(async function init() {
+  Services.prefs.setCharPref(
+    "browser.safebrowsing.provider.mozilla.updateURL",
+    `moz-sbrs://tracking-protection-list`
+  );
+  // Setup HTTP server for remote setting
+  server = new HttpServer();
+  server.start(-1);
+  registerCleanupFunction(() => server.stop(() => {}));
+
+  // Serve attachment files from the test's data/ directory.
+  server.registerDirectory(
+    "/cdn/main-workspace/tracking-protection-lists/",
+    do_get_file("data")
+  );
+
+  // Minimal API endpoint: only the attachment base_url capability is needed.
+  server.registerPathHandler("/v1/", (request, response) => {
+    response.write(
+      JSON.stringify({
+        capabilities: {
+          attachments: {
+            base_url: `http://localhost:${server.identity.primaryPort}/cdn/`,
+          },
+        },
+      })
+    );
+    response.setHeader("Content-Type", "application/json; charset=UTF-8");
+    response.setStatusLine(null, 200, "OK");
+  });
+
+  Services.prefs.setCharPref(
+    "services.settings.server",
+    `http://localhost:${server.identity.primaryPort}/v1`
+  );
+
+  // Setup remote setting initial data
+  let db = await RemoteSettings(COLLECTION_NAME).db;
+  await db.importChanges({}, 42, REMOTE_SETTINGS_DATA);
+
+  registerCleanupFunction(() => {
+    Services.prefs.clearUserPref(
+      "browser.safebrowsing.provider.mozilla.updateURL"
+    );
+    Services.prefs.clearUserPref("services.settings.server");
+  });
+});
+
+// Test updates from RemoteSettings when there is no local data
+// With no local data, every requested table's attachment must be streamed.
+add_task(async function test_empty_update() {
+  let updateEvent = new UpdateEvent();
+  let promise = waitForEvent(updateEvent, "update");
+
+  const TEST_TABLES = [
+    ["mozplugin-block-digest256", null], // empty
+    ["content-fingerprinting-track-digest256", null], // empty
+  ];
+
+  gListService.fetchList(buildPayload(TEST_TABLES), {
+    // nsIStreamListener observer
+    onStartRequest(request) {},
+    onDataAvailable(aRequest, aStream, aOffset, aCount) {
+      let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+        Ci.nsIScriptableInputStream
+      );
+      stream.init(aStream);
+      let event = new CustomEvent("update", {
+        detail: stream.readBytes(aCount),
+      });
+      updateEvent.dispatchEvent(event);
+    },
+    onStopRequest(request, status) {},
+  });
+
+  // Expected stream: the minimum-delay header followed by each table's data.
+  let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+  for (const table of TEST_TABLES) {
+    expected += `i:${table[0]}\n` + readFileToString(`data/${table[0]}`);
+  }
+
+  Assert.equal(
+    await promise,
+    expected,
+    "Receive expected data from onDataAvailable"
+  );
+  gListService.clear();
+});
+
+// Test updates from RemoteSettings when we have an empty table,
+// a table with an older version, and a table which is up-to-date.
+// Only the stale table's data should be streamed back; the up-to-date table
+// contributes nothing beyond the delay header.
+add_task(async function test_update() {
+  let updateEvent = new UpdateEvent();
+  let promise = waitForEvent(updateEvent, "update");
+
+  const TEST_TABLES = [
+    ["mozplugin-block-digest256", 1575583456], // up-to-date
+    ["content-fingerprinting-track-digest256", 1575583456 - 1], // older version
+  ];
+
+  gListService.fetchList(buildPayload(TEST_TABLES), {
+    // nsIStreamListener observer
+    onStartRequest(request) {},
+    onDataAvailable(aRequest, aStream, aOffset, aCount) {
+      let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+        Ci.nsIScriptableInputStream
+      );
+      stream.init(aStream);
+      let event = new CustomEvent("update", {
+        detail: stream.readBytes(aCount),
+      });
+      updateEvent.dispatchEvent(event);
+    },
+    onStopRequest(request, status) {},
+  });
+
+  // Expect data only for the stale table.
+  let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+  for (const table of TEST_TABLES) {
+    if (["content-fingerprinting-track-digest256"].includes(table[0])) {
+      expected += `i:${table[0]}\n` + readFileToString(`data/${table[0]}`);
+    }
+  }
+
+  Assert.equal(
+    await promise,
+    expected,
+    "Receive expected data from onDataAvailable"
+  );
+  gListService.clear();
+});
+
+// Test updates from RemoteSettings service when all tables are up-to-date.
+// When every table is up-to-date, only the delay header is streamed.
+add_task(async function test_no_update() {
+  let updateEvent = new UpdateEvent();
+  let promise = waitForEvent(updateEvent, "update");
+
+  const TEST_TABLES = [
+    ["mozplugin-block-digest256", 1575583456], // up-to-date
+    ["content-fingerprinting-track-digest256", 1597417364], // up-to-date
+  ];
+
+  gListService.fetchList(buildPayload(TEST_TABLES), {
+    // nsIStreamListener observer
+    onStartRequest(request) {},
+    onDataAvailable(aRequest, aStream, aOffset, aCount) {
+      let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+        Ci.nsIScriptableInputStream
+      );
+      stream.init(aStream);
+      let event = new CustomEvent("update", {
+        detail: stream.readBytes(aCount),
+      });
+      updateEvent.dispatchEvent(event);
+    },
+    onStopRequest(request, status) {},
+  });
+
+  // No data is expected
+  let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+
+  Assert.equal(
+    await promise,
+    expected,
+    "Receive expected data from onDataAvailable"
+  );
+  gListService.clear();
+});
+
+add_test(function test_update() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccess(aEvent) {
+ Assert.equal(SBRS_UPDATE_MINIMUM_DELAY, aEvent);
+ info("All data processed");
+ run_next_test();
+ }
+ // Just throw if we ever get an update or download error.
+ function handleError(aEvent) {
+ do_throw("We didn't download or update correctly: " + aEvent);
+ }
+
+ streamUpdater.downloadUpdates(
+ "content-fingerprinting-track-digest256",
+ "content-fingerprinting-track-digest256;\n",
+ true,
+ "moz-sbrs://remote-setting",
+ updateSuccess,
+ handleError,
+ handleError
+ );
+});
+
+// A URL that is on no list must return an empty classification result.
+add_test(function test_url_not_denylisted() {
+  let uri = Services.io.newURI("http://example.com");
+  let principal = Services.scriptSecurityManager.createContentPrincipal(
+    uri,
+    {}
+  );
+  gDbService.lookup(
+    principal,
+    "content-fingerprinting-track-digest256",
+    function handleEvent(aEvent) {
+      // This URI is not on any lists.
+      Assert.equal("", aEvent);
+      run_next_test();
+    }
+  );
+});
+
+// A URL contained in the downloaded list data must be classified as such.
+add_test(function test_url_denylisted() {
+  let uri = Services.io.newURI("https://www.foresee.com");
+  let principal = Services.scriptSecurityManager.createContentPrincipal(
+    uri,
+    {}
+  );
+  gDbService.lookup(
+    principal,
+    "content-fingerprinting-track-digest256",
+    function handleEvent(aEvent) {
+      Assert.equal("content-fingerprinting-track-digest256", aEvent);
+      run_next_test();
+    }
+  );
+});
+
+add_test(function test_update_download_error() {
+ let streamUpdater = Cc[
+ "@mozilla.org/url-classifier/streamupdater;1"
+ ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+ // Download some updates, and don't continue until the downloads are done.
+ function updateSuccessOrError(aEvent) {
+ do_throw("Should be downbload error");
+ }
+ // Just throw if we ever get an update or download error.
+ function downloadError(aEvent) {
+ run_next_test();
+ }
+
+ streamUpdater.downloadUpdates(
+ "social-track-digest256",
+ "social-track-digest256;\n",
+ true,
+ "moz-sbrs://remote-setting",
+ updateSuccessOrError,
+ updateSuccessOrError,
+ downloadError
+ );
+});
+
+// The "analytic-track-digest256" fixture's attachment exists but is
+// corrupted, so the downloaded data must fail to apply as an update.
+add_test(function test_update_update_error() {
+  let streamUpdater = Cc[
+    "@mozilla.org/url-classifier/streamupdater;1"
+  ].getService(Ci.nsIUrlClassifierStreamUpdater);
+
+  // Reaching the success or download-error callback would be a test failure.
+  function updateSuccessOrDownloadError(aEvent) {
+    do_throw("Should be update error");
+  }
+  // Expected path: the corrupted chunk produces an update error.
+  function updateError(aEvent) {
+    run_next_test();
+  }
+
+  streamUpdater.downloadUpdates(
+    "analytic-track-digest256",
+    "analytic-track-digest256;\n",
+    true,
+    "moz-sbrs://remote-setting",
+    updateSuccessOrDownloadError,
+    updateError,
+    updateSuccessOrDownloadError
+  );
+});
+
+// A large (~1.4MB) stale attachment must be streamed back in full.
+add_task(async function test_update_large_file() {
+  let updateEvent = new UpdateEvent();
+  let promise = waitForEvent(updateEvent, "update");
+
+  const TEST_TABLES = [
+    ["google-trackwhite-digest256", 1575583456 - 1], // older version, update expected
+  ];
+
+  gListService.fetchList(buildPayload(TEST_TABLES), {
+    // nsIStreamListener observer
+    onStartRequest(request) {},
+    onDataAvailable(aRequest, aStream, aOffset, aCount) {
+      let stream = Cc["@mozilla.org/scriptableinputstream;1"].createInstance(
+        Ci.nsIScriptableInputStream
+      );
+      stream.init(aStream);
+      let event = new CustomEvent("update", {
+        detail: stream.readBytes(aCount),
+      });
+      updateEvent.dispatchEvent(event);
+    },
+    onStopRequest(request, status) {},
+  });
+
+  // Expect data only for the stale table.
+  let expected = "n:" + SBRS_UPDATE_MINIMUM_DELAY + "\n";
+  for (const table of TEST_TABLES) {
+    if (["google-trackwhite-digest256"].includes(table[0])) {
+      expected += `i:${table[0]}\n` + readFileToString(`data/${table[0]}`);
+    }
+  }
+
+  Assert.equal(
+    await promise,
+    expected,
+    "Receive expected data from onDataAvailable"
+  );
+  gListService.clear();
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
new file mode 100644
index 0000000000..73426751cb
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_safebrowsing_protobuf.js
@@ -0,0 +1,29 @@
+// Invalid list names must be silently dropped from V4 update requests, so a
+// request containing only invalid names equals an empty request, and one
+// mixing valid and invalid names equals the valid-only request.
+function run_test() {
+  let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+    Ci.nsIUrlClassifierUtils
+  );
+
+  // No list at all.
+  let requestNoList = urlUtils.makeUpdateRequestV4([], []);
+
+  // Only one valid list name.
+  let requestOneValid = urlUtils.makeUpdateRequestV4(
+    ["goog-phish-proto"],
+    ["AAAAAA"]
+  );
+
+  // Only one invalid list name.
+  let requestOneInvalid = urlUtils.makeUpdateRequestV4(
+    ["bad-list-name"],
+    ["AAAAAA"]
+  );
+
+  // One valid and one invalid list name.
+  let requestOneInvalidOneValid = urlUtils.makeUpdateRequestV4(
+    ["goog-phish-proto", "bad-list-name"],
+    ["AAAAAA", "AAAAAA"]
+  );
+
+  equal(requestNoList, requestOneInvalid);
+  equal(requestOneValid, requestOneInvalidOneValid);
+}
diff --git a/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js b/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js
new file mode 100644
index 0000000000..6abdbb5f85
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_shouldclassify.js
@@ -0,0 +1,166 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+*/
+
+"use strict";
+
+const { NetUtil } = ChromeUtils.importESModule(
+ "resource://gre/modules/NetUtil.sys.mjs"
+);
+const { UrlClassifierTestUtils } = ChromeUtils.importESModule(
+ "resource://testing-common/UrlClassifierTestUtils.sys.mjs"
+);
+
+// Top window URI assigned to every test channel.
+const defaultTopWindowURI = NetUtil.newURI("http://www.example.com/");
+
+// Both are initialized by setupHttpServer().
+var httpServer;
+var trackingOrigin;
+
+// The ShouldClassify algorithm uses the following parameters:
+// 1. The Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER load flag
+// 2. Content type
+// 3. Triggering principal
+// 4. The beConservative flag
+// We test all the combinations here to make sure the algorithm is correct.
+
+// const PARAM_LOAD_BYPASS_URL_CLASSIFIER = 1 << 0;
+// NOTE(review): bit 0 is unused (the bypass-flag parameter above is
+// commented out), so half of the 16 bitmask combinations iterated by the
+// test are duplicates — confirm whether that is intentional.
+const PARAM_CONTENT_POLICY_TYPE_DOCUMENT = 1 << 1;
+const PARAM_TRIGGERING_PRINCIPAL_SYSTEM = 1 << 2;
+const PARAM_CAP_BE_CONSERVATIVE = 1 << 3;
+const PARAM_MAX = 1 << 4;
+
+// Map a combination bitmask onto a channel-parameter record
+// { loadFlags, contentType, system, beConservative }.
+function getParameters(bitFlags) {
+  var params = {
+    loadFlags: Ci.nsIRequest.LOAD_NORMAL,
+    contentType: Ci.nsIContentPolicy.TYPE_OTHER,
+    system: false,
+    beConservative: false,
+  };
+
+  // NOTE(review): PARAM_TRIGGERING_PRINCIPAL_SYSTEM is tested twice — here
+  // it sets the bypass load flag, below it sets |system| — while
+  // PARAM_LOAD_BYPASS_URL_CLASSIFIER is commented out above. Confirm this
+  // coupling of the two parameters is intentional.
+  if (bitFlags & PARAM_TRIGGERING_PRINCIPAL_SYSTEM) {
+    params.loadFlags = Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER;
+  }
+
+  if (bitFlags & PARAM_CONTENT_POLICY_TYPE_DOCUMENT) {
+    params.contentType = Ci.nsIContentPolicy.TYPE_DOCUMENT;
+  }
+
+  if (bitFlags & PARAM_TRIGGERING_PRINCIPAL_SYSTEM) {
+    params.system = true;
+  }
+
+  if (bitFlags & PARAM_CAP_BE_CONSERVATIVE) {
+    params.beConservative = true;
+  }
+
+  return params;
+}
+
+function getExpectedResult(params) {
+ if (params.loadFlags & Ci.nsIChannel.LOAD_BYPASS_URL_CLASSIFIER) {
+ return false;
+ }
+ if (params.beConservative) {
+ return false;
+ }
+ if (
+ params.system &&
+ params.contentType != Ci.nsIContentPolicy.TYPE_DOCUMENT
+ ) {
+ return false;
+ }
+
+ return true;
+}
+
+// Start a local HTTP server answering for both tracking.example.org and
+// example.org, and record the tracking origin used by setupChannel().
+function setupHttpServer() {
+  httpServer = new HttpServer();
+  httpServer.start(-1);
+  httpServer.identity.setPrimary(
+    "http",
+    "tracking.example.org",
+    httpServer.identity.primaryPort
+  );
+  httpServer.identity.add(
+    "http",
+    "example.org",
+    httpServer.identity.primaryPort
+  );
+  trackingOrigin =
+    "http://tracking.example.org:" + httpServer.identity.primaryPort;
+}
+
+// Build an HTTP channel to the tracking origin configured according to
+// |params| (see getParameters): principal kind, content type, load flags,
+// and the beConservative capability.
+function setupChannel(params) {
+  var channel;
+
+  if (params.system) {
+    // System-principal channel.
+    channel = NetUtil.newChannel({
+      uri: trackingOrigin + "/evil.js",
+      loadUsingSystemPrincipal: true,
+      contentPolicyType: params.contentType,
+    });
+  } else {
+    // Content-principal channel for the tracking origin itself.
+    let principal = Services.scriptSecurityManager.createContentPrincipal(
+      NetUtil.newURI(trackingOrigin),
+      {}
+    );
+    channel = NetUtil.newChannel({
+      uri: trackingOrigin + "/evil.js",
+      loadingPrincipal: principal,
+      securityFlags: Ci.nsILoadInfo.SEC_ALLOW_CROSS_ORIGIN_SEC_CONTEXT_IS_NULL,
+      contentPolicyType: params.contentType,
+    });
+  }
+
+  channel.QueryInterface(Ci.nsIHttpChannel);
+  channel.requestMethod = "GET";
+  channel.loadFlags |= params.loadFlags;
+  channel
+    .QueryInterface(Ci.nsIHttpChannelInternal)
+    .setTopWindowURIIfUnknown(defaultTopWindowURI);
+  channel.QueryInterface(Ci.nsIHttpChannelInternal).beConservative =
+    params.beConservative;
+
+  return channel;
+}
+
+// Iterate every parameter combination and check that the channel receives
+// tracking classification flags exactly when getExpectedResult() says the
+// classifier should run.
+add_task(async function testShouldClassify() {
+  Services.prefs.setBoolPref(
+    "privacy.trackingprotection.annotate_channels",
+    true
+  );
+  Services.prefs.setBoolPref("network.dns.native-is-localhost", true);
+
+  setupHttpServer();
+
+  await UrlClassifierTestUtils.addTestTrackers();
+
+  for (let i = 0; i < PARAM_MAX; i++) {
+    let params = getParameters(i);
+    let channel = setupChannel(params);
+
+    await new Promise(resolve => {
+      channel.asyncOpen({
+        onStartRequest: (request, context) => {
+          Assert.equal(
+            !!(
+              request.QueryInterface(Ci.nsIClassifiedChannel)
+                .classificationFlags &
+              Ci.nsIClassifiedChannel.CLASSIFIED_ANY_BASIC_TRACKING
+            ),
+            getExpectedResult(params)
+          );
+          // The classification decision is made by onStartRequest; the body
+          // is not needed, so abort the load.
+          request.cancel(Cr.NS_ERROR_ABORT);
+          resolve();
+        },
+
+        onDataAvailable: (request, context, stream, offset, count) => {},
+        onStopRequest: (request, context, status) => {},
+      });
+    });
+  }
+
+  UrlClassifierTestUtils.cleanupTestTrackers();
+
+  httpServer.stop(do_test_finished);
+});
diff --git a/toolkit/components/url-classifier/tests/unit/test_streamupdater.js b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
new file mode 100644
index 0000000000..1a2ee847d1
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_streamupdater.js
@@ -0,0 +1,244 @@
+function doTest(updates, assertions, expectError) {
+ if (expectError) {
+ doUpdateTest(updates, assertions, updateError, runNextTest);
+ } else {
+ doUpdateTest(updates, assertions, runNextTest, updateError);
+ }
+}
+
+// Never use the same URLs for multiple tests, because we aren't guaranteed
+// to reset the database between tests.
+function testFillDb() {
+ var add1Urls = ["zaz.com/a", "yxz.com/c"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: add1Urls,
+ };
+
+ doTest([update], assertions, false);
+}
+
+function testSimpleForward() {
+ var add1Urls = ["foo-simple.com/a", "bar-simple.com/c"];
+ var add2Urls = ["foo-simple.com/b"];
+ var add3Urls = ["bar-simple.com/d"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ tableData: "test-phish-simple;a:1-3",
+ urlsExist: add1Urls.concat(add2Urls).concat(add3Urls),
+ };
+
+ doTest([update], assertions, false);
+}
+
+// Make sure that a nested forward (a forward within a forward) causes
+// the update to fail.
+function testNestedForward() {
+  var add1Urls = ["foo-nested.com/a", "bar-nested.com/c"];
+  var add2Urls = ["foo-nested.com/b"];
+
+  var update = "n:1000\n";
+  update += "i:test-phish-simple\n";
+
+  // First forward: an ordinary, valid chunk.
+  var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+  update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+  // Second forward: an empty chunk into which another forward is
+  // embedded below — the illegal nesting under test.
+  var update2 = buildBareUpdate([{ chunkNum: 2 }]);
+  // NOTE(review): the nested chunk reuses add1Urls while add2Urls is
+  // only used in the assertion — add2Urls may have been intended here;
+  // confirm before changing, as either way the update must fail.
+  var update3 = buildBareUpdate([{ chunkNum: 3, urls: add1Urls }]);
+
+  update2 += "u:data:," + encodeURIComponent(update3) + "\n";
+
+  update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+  // The failed update must leave nothing behind.
+  var assertions = {
+    tableData: "",
+    urlsDontExist: add1Urls.concat(add2Urls),
+  };
+
+  doTest([update], assertions, true);
+}
+
+// An invalid URL forward causes the update to fail.
+function testInvalidUrlForward() {
+ var add1Urls = ["foo-invalid.com/a", "bar-invalid.com/c"];
+
+ var update = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:asdf://blah/blah\n"; // invalid URL scheme
+
+ // add1Urls is present, but that is an artifact of the way we do the test.
+ var assertions = {
+ tableData: "test-phish-simple;a:1",
+ urlsExist: add1Urls,
+ };
+
+ doTest([update], assertions, true);
+}
+
+// A failed network request causes the update to fail.
+function testErrorUrlForward() {
+  var add1Urls = ["foo-forward.com/a", "bar-forward.com/c"];
+
+  var update = buildPhishingUpdate([{ chunkNum: 1, urls: add1Urls }]);
+  // The scheme here is valid (unlike testInvalidUrlForward); the
+  // reserved .invalid TLD guarantees the request itself fails, which
+  // must fail the update.
+  update += "u:http://test.invalid/asdf/asdf\n";
+
+  // add1Urls is present, but that is an artifact of the way we do the test.
+  var assertions = {
+    tableData: "test-phish-simple;a:1",
+    urlsExist: add1Urls,
+  };
+
+  doTest([update], assertions, true);
+}
+
+function testMultipleTables() {
+ var add1Urls = ["foo-multiple.com/a", "bar-multiple.com/c"];
+ var add2Urls = ["foo-multiple.com/b"];
+ var add3Urls = ["bar-multiple.com/d"];
+ var add4Urls = ["bar-multiple.com/e"];
+ var add6Urls = ["bar-multiple.com/g"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add2Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-malware-simple\n";
+
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add3Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update4 = buildBareUpdate([{ chunkNum: 4, urls: add4Urls }]);
+ update += "u:data:," + encodeURIComponent(update4) + "\n";
+
+ update += "i:test-block-simple\n";
+ var update6 = buildBareUpdate([{ chunkNum: 6, urls: add6Urls }]);
+ update += "u:data:," + encodeURIComponent(update6) + "\n";
+
+ var assertions = {
+ tableData:
+ "test-block-simple;a:6\ntest-malware-simple;a:3\ntest-phish-simple;a:1-2\ntest-unwanted-simple;a:4",
+ urlsExist: add1Urls.concat(add2Urls),
+ malwareUrlsExist: add3Urls,
+ unwantedUrlsExist: add4Urls,
+ blockedUrlsExist: add6Urls,
+ };
+
+ doTest([update], assertions, false);
+}
+
+function testUrlInMultipleTables() {
+ var add1Urls = ["foo-forward.com/a"];
+
+ var update = "n:1000\n";
+ update += "i:test-phish-simple\n";
+
+ var update1 = buildBareUpdate([{ chunkNum: 1, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update1) + "\n";
+
+ update += "i:test-malware-simple\n";
+ var update2 = buildBareUpdate([{ chunkNum: 2, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update2) + "\n";
+
+ update += "i:test-unwanted-simple\n";
+ var update3 = buildBareUpdate([{ chunkNum: 3, urls: add1Urls }]);
+ update += "u:data:," + encodeURIComponent(update3) + "\n";
+
+ var assertions = {
+ tableData:
+ "test-malware-simple;a:2\ntest-phish-simple;a:1\ntest-unwanted-simple;a:3",
+ urlExistInMultipleTables: {
+ url: add1Urls,
+ tables: "test-malware-simple,test-phish-simple,test-unwanted-simple",
+ },
+ };
+
+ doTest([update], assertions, false);
+}
+
+// Minimal nsIObserver wrapper: notifications are forwarded straight to
+// `callback` as the observe() method.
+// NOTE(review): not referenced anywhere in this file's visible tests —
+// presumably used by shared head code; confirm before removing.
+function Observer(callback) {
+  this.observe = callback;
+}
+
+Observer.prototype = {
+  QueryInterface: ChromeUtils.generateQI(["nsIObserver"]),
+};
+
+// Tests a database reset request.
+function testReset() {
+  // The moz-phish-simple table is populated separately from the other update in
+  // a separate update request. Therefore it should not be reset when we run the
+  // updates later in this function.
+  var mozAddUrls = ["moz-reset.com/a"];
+  var mozUpdate = buildMozPhishingUpdate([{ chunkNum: 1, urls: mozAddUrls }]);
+
+  var dataUpdate = "data:," + encodeURIComponent(mozUpdate);
+
+  // Apply the moz-phish-simple update directly through the stream
+  // updater (bypassing doTest); any failure routes to updateError.
+  streamUpdater.downloadUpdates(
+    mozTables,
+    "",
+    true,
+    dataUpdate,
+    () => {},
+    updateError,
+    updateError
+  );
+
+  // First update: adds chunk 1, which the reset below must wipe out.
+  var addUrls1 = ["foo-reset.com/a", "foo-reset.com/b"];
+  var update1 = buildPhishingUpdate([{ chunkNum: 1, urls: addUrls1 }]);
+
+  // Second update: "r:pleasereset" requests a reset of the tables named
+  // in this update request.
+  var update2 = "n:1000\nr:pleasereset\n";
+
+  // Third update: adds chunk 3 after the reset, so it must survive.
+  var addUrls3 = ["bar-reset.com/a", "bar-reset.com/b"];
+  var update3 = buildPhishingUpdate([{ chunkNum: 3, urls: addUrls3 }]);
+
+  var assertions = {
+    tableData: "moz-phish-simple;a:1\ntest-phish-simple;a:3", // tables that should still be there.
+    mozPhishingUrlsExist: mozAddUrls, // mozAddUrls added prior to the reset
+    // but it should still exist after reset.
+    urlsExist: addUrls3, // addUrls3 added after the reset.
+    urlsDontExist: addUrls1, // addUrls1 added prior to the reset
+  };
+
+  // Use these update responses in order. The update request only
+  // contains test-*-simple tables so the reset will only apply to these.
+  doTest([update1, update2, update3], assertions, false);
+}
+
+function run_test() {
+ runTests([
+ testFillDb,
+ testSimpleForward,
+ testNestedForward,
+ testInvalidUrlForward,
+ testErrorUrlForward,
+ testMultipleTables,
+ testUrlInMultipleTables,
+ testReset,
+ ]);
+}
+
+do_test_pending();
diff --git a/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
new file mode 100644
index 0000000000..b1bec40b4f
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/test_threat_type_conversion.js
@@ -0,0 +1,50 @@
+function run_test() {
+ let urlUtils = Cc["@mozilla.org/url-classifier/utils;1"].getService(
+ Ci.nsIUrlClassifierUtils
+ );
+
+ // Test list name to threat type conversion.
+
+ equal(urlUtils.convertListNameToThreatType("goog-malware-proto"), 1);
+ equal(urlUtils.convertListNameToThreatType("googpub-phish-proto"), 2);
+ equal(urlUtils.convertListNameToThreatType("goog-unwanted-proto"), 3);
+ equal(urlUtils.convertListNameToThreatType("goog-harmful-proto"), 4);
+ equal(urlUtils.convertListNameToThreatType("goog-phish-proto"), 5);
+ equal(urlUtils.convertListNameToThreatType("goog-badbinurl-proto"), 7);
+ equal(urlUtils.convertListNameToThreatType("goog-downloadwhite-proto"), 9);
+
+ try {
+ urlUtils.convertListNameToThreatType("bad-list-name");
+ ok(false, "Bad list name should lead to exception.");
+ } catch (e) {}
+
+ try {
+ urlUtils.convertListNameToThreatType("bad-list-name");
+ ok(false, "Bad list name should lead to exception.");
+ } catch (e) {}
+
+ // Test threat type to list name conversion.
+ equal(urlUtils.convertThreatTypeToListNames(1), "goog-malware-proto");
+ equal(
+ urlUtils.convertThreatTypeToListNames(2),
+ "googpub-phish-proto,moztest-phish-proto,test-phish-proto"
+ );
+ equal(
+ urlUtils.convertThreatTypeToListNames(3),
+ "goog-unwanted-proto,moztest-unwanted-proto,test-unwanted-proto"
+ );
+ equal(urlUtils.convertThreatTypeToListNames(4), "goog-harmful-proto");
+ equal(urlUtils.convertThreatTypeToListNames(5), "goog-phish-proto");
+ equal(urlUtils.convertThreatTypeToListNames(7), "goog-badbinurl-proto");
+ equal(urlUtils.convertThreatTypeToListNames(9), "goog-downloadwhite-proto");
+
+ try {
+ urlUtils.convertThreatTypeToListNames(0);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+
+ try {
+ urlUtils.convertThreatTypeToListNames(100);
+ ok(false, "Bad threat type should lead to exception.");
+ } catch (e) {}
+}
diff --git a/toolkit/components/url-classifier/tests/unit/xpcshell.toml b/toolkit/components/url-classifier/tests/unit/xpcshell.toml
new file mode 100644
index 0000000000..561188ceb3
--- /dev/null
+++ b/toolkit/components/url-classifier/tests/unit/xpcshell.toml
@@ -0,0 +1,56 @@
+[DEFAULT]
+head = "head_urlclassifier.js"
+tags = "condprof"
+skip-if = ["os == 'android'"]
+support-files = ["data/**"]
+
+["test_addsub.js"]
+
+["test_backoff.js"]
+
+["test_bug1274685_unowned_list.js"]
+
+["test_canonicalization.js"]
+
+["test_channelClassifierService.js"]
+
+["test_dbservice.js"]
+skip-if = ["condprof"] # Bug 1769828
+
+["test_digest256.js"]
+run-sequentially = "very high failure rate in parallel"
+
+["test_exceptionListService.js"]
+tags = "remote-settings"
+
+["test_features.js"]
+
+["test_hashcompleter.js"]
+
+["test_hashcompleter_v4.js"]
+
+["test_listmanager.js"]
+run-sequentially = "very high failure rate in parallel"
+
+["test_malwaretable_pref.js"]
+
+["test_partial.js"]
+
+["test_platform_specific_threats.js"]
+
+["test_pref.js"]
+
+["test_prefixset.js"]
+
+["test_provider_url.js"]
+
+["test_rsListService.js"]
+tags = "remote-settings"
+
+["test_safebrowsing_protobuf.js"]
+
+["test_shouldclassify.js"]
+
+["test_streamupdater.js"]
+
+["test_threat_type_conversion.js"]