path: root/dom/performance/tests
author      Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
committer   Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
commit      36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree        105e8c98ddea1c1e4784a60a5a6410fa416be2de /dom/performance/tests
parent      Initial commit. (diff)
download    firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
            firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr. (tag: upstream/115.7.0esr, branch: upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'dom/performance/tests')
-rw-r--r--  dom/performance/tests/empty.js                                        |   1
-rw-r--r--  dom/performance/tests/logo.png                                        | bin 0 -> 21901 bytes
-rw-r--r--  dom/performance/tests/mochitest.ini                                   |  44
-rw-r--r--  dom/performance/tests/serverTiming.sjs                                |  41
-rw-r--r--  dom/performance/tests/sharedworker_performance_user_timing.js         |  38
-rw-r--r--  dom/performance/tests/test_performance_navigation_timing.html         | 104
-rw-r--r--  dom/performance/tests/test_performance_observer.html                  | 142
-rw-r--r--  dom/performance/tests/test_performance_observer.js                    | 286
-rw-r--r--  dom/performance/tests/test_performance_paint_observer.html            |  40
-rw-r--r--  dom/performance/tests/test_performance_paint_observer_helper.html     |  35
-rw-r--r--  dom/performance/tests/test_performance_paint_timing.html              |  38
-rw-r--r--  dom/performance/tests/test_performance_paint_timing_helper.html       |  65
-rw-r--r--  dom/performance/tests/test_performance_server_timing.html             |  58
-rw-r--r--  dom/performance/tests/test_performance_server_timing_plain_http.html  |  42
-rw-r--r--  dom/performance/tests/test_performance_timing_json.html               |  32
-rw-r--r--  dom/performance/tests/test_performance_user_timing.html               |  49
-rw-r--r--  dom/performance/tests/test_performance_user_timing.js                 | 318
-rw-r--r--  dom/performance/tests/test_performance_user_timing_dying_global.html  |  61
-rw-r--r--  dom/performance/tests/test_sharedWorker_performance_user_timing.html  |  30
-rw-r--r--  dom/performance/tests/test_timeOrigin.html                            |  76
-rw-r--r--  dom/performance/tests/test_worker_observer.html                       |  41
-rw-r--r--  dom/performance/tests/test_worker_performance_entries.html            |  39
-rw-r--r--  dom/performance/tests/test_worker_performance_entries.js              | 120
-rw-r--r--  dom/performance/tests/test_worker_performance_entries.sjs             |  11
-rw-r--r--  dom/performance/tests/test_worker_performance_now.html                |  31
-rw-r--r--  dom/performance/tests/test_worker_performance_now.js                  |  68
-rw-r--r--  dom/performance/tests/test_worker_user_timing.html                    |  30
-rw-r--r--  dom/performance/tests/worker_performance_observer.js                  |   4
-rw-r--r--  dom/performance/tests/worker_performance_user_timing.js               |  32
29 files changed, 1876 insertions(+), 0 deletions(-)
diff --git a/dom/performance/tests/empty.js b/dom/performance/tests/empty.js
new file mode 100644
index 0000000000..3b44754e30
--- /dev/null
+++ b/dom/performance/tests/empty.js
@@ -0,0 +1 @@
+/* Nothing here */
diff --git a/dom/performance/tests/logo.png b/dom/performance/tests/logo.png
new file mode 100644
index 0000000000..a05926bcd7
--- /dev/null
+++ b/dom/performance/tests/logo.png
Binary files differ
diff --git a/dom/performance/tests/mochitest.ini b/dom/performance/tests/mochitest.ini
new file mode 100644
index 0000000000..9fbdac9efe
--- /dev/null
+++ b/dom/performance/tests/mochitest.ini
@@ -0,0 +1,44 @@
+[DEFAULT]
+support-files =
+ test_performance_observer.js
+ test_performance_user_timing.js
+ test_worker_performance_now.js
+ worker_performance_user_timing.js
+ worker_performance_observer.js
+ sharedworker_performance_user_timing.js
+ test_worker_performance_entries.js
+ test_worker_performance_entries.sjs
+ empty.js
+ serverTiming.sjs
+
+[test_performance_observer.html]
+[test_performance_user_timing.html]
+[test_performance_user_timing_dying_global.html]
+[test_performance_navigation_timing.html]
+[test_performance_paint_timing.html]
+support-files =
+ test_performance_paint_timing_helper.html
+ logo.png
+[test_performance_paint_observer.html]
+support-files =
+ test_performance_paint_observer_helper.html
+ logo.png
+[test_worker_user_timing.html]
+[test_worker_observer.html]
+[test_sharedWorker_performance_user_timing.html]
+skip-if = true # Bug 1571904
+[test_worker_performance_now.html]
+[test_timeOrigin.html]
+skip-if = toolkit == 'android' # Bug 1525959
+[test_worker_performance_entries.html]
+skip-if =
+ toolkit == 'android' # Bug 1525959
+ http3
+[test_performance_timing_json.html]
+[test_performance_server_timing.html]
+scheme = https
+skip-if =
+ http3
+[test_performance_server_timing_plain_http.html]
+skip-if =
+ http3
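The manifest above registers each test page with the mochitest harness: support-files lists scripts and helper pages the tests load at runtime, while per-test annotations such as skip-if and scheme gate where a test runs. A new page would follow the same pattern (hypothetical entry, not part of this diff):

    [test_new_performance_feature.html]
    support-files =
      test_new_performance_feature_helper.html
    skip-if = toolkit == 'android' # hypothetical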
diff --git a/dom/performance/tests/serverTiming.sjs b/dom/performance/tests/serverTiming.sjs
new file mode 100644
index 0000000000..8a93829fa5
--- /dev/null
+++ b/dom/performance/tests/serverTiming.sjs
@@ -0,0 +1,41 @@
+var responseServerTiming = [
+ { metric: "metric1", duration: "123.4", description: "description1" },
+ { metric: "metric2", duration: "456.78", description: "description2" },
+];
+var trailerServerTiming = [
+ { metric: "metric3", duration: "789.11", description: "description3" },
+ { metric: "metric4", duration: "1112.13", description: "description4" },
+];
+
+function createServerTimingHeader(headerData) {
+ var header = "";
+ for (var i = 0; i < headerData.length; i++) {
+ header +=
+ "Server-Timing:" +
+ headerData[i].metric +
+ ";" +
+ "dur=" +
+ headerData[i].duration +
+ ";" +
+ "desc=" +
+ headerData[i].description +
+ "\r\n";
+ }
+ return header;
+}
+
+function handleRequest(request, response) {
+ var body = "c\r\ndata reached\r\n3\r\nhej\r\n0\r\n";
+
+ response.seizePower();
+ response.write("HTTP/1.1 200 OK\r\n");
+ response.write("Content-Type: text/plain\r\n");
+ response.write(createServerTimingHeader(responseServerTiming));
+
+ response.write("Transfer-Encoding: chunked\r\n");
+ response.write("\r\n");
+ response.write(body);
+ response.write(createServerTimingHeader(trailerServerTiming));
+ response.write("\r\n");
+ response.finish();
+}
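Each entry in the two arrays above is serialized by createServerTimingHeader() into a raw header line of the form Server-Timing:metric1;dur=123.4;desc=description1, emitted once among the response headers and once as a trailer after the chunked body. A page fetching this .sjs over HTTPS can read the combined metrics back through Resource Timing, roughly as in this sketch (illustrative only; the actual assertions live in test_performance_server_timing.html below):

    const entry = performance.getEntriesByType("resource")
      .find(e => e.name.endsWith("serverTiming.sjs"));
    for (const st of entry.serverTiming) {
      // e.g. "metric1" 123.4 "description1"
      console.log(st.name, st.duration, st.description);
    }

Over plain HTTP the serverTiming attribute is not exposed, which test_performance_server_timing_plain_http.html checks.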
diff --git a/dom/performance/tests/sharedworker_performance_user_timing.js b/dom/performance/tests/sharedworker_performance_user_timing.js
new file mode 100644
index 0000000000..6dcbd5d7d9
--- /dev/null
+++ b/dom/performance/tests/sharedworker_performance_user_timing.js
@@ -0,0 +1,38 @@
+var port;
+
+function ok(a, msg) {
+ dump("OK: " + !!a + " => " + a + " " + msg + "\n");
+ port.postMessage({ type: "status", status: !!a, msg: a + ": " + msg });
+}
+
+function is(a, b, msg) {
+ dump("IS: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+ port.postMessage({
+ type: "status",
+ status: a === b,
+ msg: a + " === " + b + ": " + msg,
+ });
+}
+
+function isnot(a, b, msg) {
+ dump("ISNOT: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+ port.postMessage({
+ type: "status",
+ status: a != b,
+ msg: a + " != " + b + ": " + msg,
+ });
+}
+
+importScripts("test_performance_user_timing.js");
+
+onconnect = function (evt) {
+ port = evt.ports[0];
+
+ for (var i = 0; i < steps.length; ++i) {
+ performance.clearMarks();
+ performance.clearMeasures();
+ steps[i]();
+ }
+
+ port.postMessage({ type: "finish" });
+};
diff --git a/dom/performance/tests/test_performance_navigation_timing.html b/dom/performance/tests/test_performance_navigation_timing.html
new file mode 100644
index 0000000000..abcf9fd340
--- /dev/null
+++ b/dom/performance/tests/test_performance_navigation_timing.html
@@ -0,0 +1,104 @@
+<!DOCTYPE HTML>
+<html>
+ <!--
+ https://bugzilla.mozilla.org/show_bug.cgi?id=1462891
+ -->
+ <head>
+ <title>Test for Bug 1462891</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ </head>
+ <body>
+ <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1462891">Mozilla Bug 1462891 - Navigation Timing API</a>
+ <div id="content">
+ </div>
+ <pre id="test">
+ <script class="testbody" type="text/javascript">
+ var index = 0;
+ let isRounded = (x, shouldRound, expectedPrecision) => {
+ if (!shouldRound)
+ return true;
+
+ let rounded = (Math.floor(x / expectedPrecision) * expectedPrecision);
+ // First we do the perfectly normal check that should work just fine
+ if (rounded === x || x === 0)
+ return true;
+
+ // When we're dividing by non-whole numbers, we may not get perfect
+ // multiplication/division because of floating points.
+ // When dealing with ms since epoch, a double's precision is on the order
+ // of 1/5 of a microsecond, so we use a value a little higher than that as
+ // our epsilon.
+ // To be clear, this error is introduced in our re-calculation of 'rounded'
+ // above in JavaScript.
+ if (Math.abs(rounded - x + expectedPrecision) < .0005) {
+ return true;
+ } else if (Math.abs(rounded - x) < .0005) {
+ return true;
+ }
+
+ // Then we handle the case where you're sub-millisecond and the timer is not
+ // We check that the timer is not sub-millisecond by assuming it is not if it
+ // returns an even number of milliseconds
+ if (expectedPrecision < 1 && Math.round(x) == x) {
+ if (Math.round(rounded) == x) {
+ return true;
+ }
+ }
+
+ ok(false, "Looming Test Failure, Additional Debugging Info: Expected Precision: " + expectedPrecision + " Measured Value: " + x +
+ " Rounded Vaue: " + rounded + " Fuzzy1: " + Math.abs(rounded - x + expectedPrecision) +
+ " Fuzzy 2: " + Math.abs(rounded - x));
+
+ return false;
+ };
+
+ var metrics = [
+ "unloadEventStart",
+ "unloadEventEnd",
+ "domInteractive",
+ "domContentLoadedEventStart",
+ "domContentLoadedEventEnd",
+ "domComplete",
+ "loadEventStart",
+ "loadEventEnd"
+ ];
+
+ async function runTests(resistFingerprinting, reduceTimerPrecision, expectedPrecision) {
+ await SpecialPowers.pushPrefEnv({
+ "set": [["privacy.resistFingerprinting", resistFingerprinting],
+ ["privacy.reduceTimerPrecision", reduceTimerPrecision],
+ ["privacy.resistFingerprinting.reduceTimerPrecision.microseconds", expectedPrecision * 1000]
+ ]});
+ var entries = performance.getEntriesByType("navigation");
+ is(entries.length, 1, "Checking PerformanceNavigationEntry count");
+
+ for (let i=0; i<entries.length; i++) {
+ for (let j=0; j<metrics.length; j++) {
+ ok(isRounded(entries[i][metrics[j]], reduceTimerPrecision, expectedPrecision),
+ "Testing " + metrics[j] + " with value " + entries[i][metrics[j]] +
+ " with resistFingerprinting " + resistFingerprinting + " reduceTimerPrecision " +
+ reduceTimerPrecision + " precision " + expectedPrecision);
+ }
+ }
+ }
+
+ async function startTests() {
+ await runTests(false, false, 2);
+ await runTests(true, false, 2);
+ await runTests(true, true, 2);
+ await runTests(false, true, 1000);
+ await runTests(false, true, 133);
+ await runTests(false, true, 13);
+ await runTests(false, true, 2);
+ await runTests(false, true, 1);
+
+ SimpleTest.finish();
+ }
+
+ SimpleTest.waitForExplicitFinish();
+ addLoadEvent(startTests);
+ </script>
+ </pre>
+ </body>
+</html>
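To make the isRounded() check above concrete (values chosen purely for illustration, assuming the test page's ok() is in scope):

    // expectedPrecision = 2 ms
    isRounded(1234,   true,  2); // true:  floor(1234 / 2) * 2 === 1234
    isRounded(1234.8, true,  2); // false: nearest multiple below is 1234, and the 0.8 ms gap
                                 //        exceeds the 0.0005 floating-point epsilon
    isRounded(1234.8, false, 2); // true: with shouldRound false the value is always accepted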
diff --git a/dom/performance/tests/test_performance_observer.html b/dom/performance/tests/test_performance_observer.html
new file mode 100644
index 0000000000..86c780c56c
--- /dev/null
+++ b/dom/performance/tests/test_performance_observer.html
@@ -0,0 +1,142 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset=utf-8>
+<title>Test for performance observer</title>
+<script src="/tests/SimpleTest/SimpleTest.js"></script>
+<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<div id="log"></div>
+<script>
+SimpleTest.requestFlakyTimeout("For testing when observer callbacks should not be called.");
+SimpleTest.waitForExplicitFinish();
+
+let _tests = [];
+
+let test = promise_test = fn => {
+ let cleanups = [];
+ _tests.push(async () => {
+ try {
+ await fn({
+ add_cleanup: f => { cleanups.push(f); },
+ step_timeout(f, timeout) {
+ var test_this = this;
+ var args = Array.prototype.slice.call(arguments, 2);
+ return setTimeout(() => {
+ return f.apply(test_this, args);
+ }, timeout);
+ }
+ });
+ } catch(e) {
+ ok(false, `got unexpected exception ${e}`);
+ }
+ try {
+ for (const f of cleanups) {
+ f();
+ }
+ runNextTest();
+ } catch (e) {
+ ok(false, `got unexpected exception during cleanup ${e}`);
+ }
+ });
+}
+
+function runNextTest() {
+ if (!_tests.length) {
+ SimpleTest.finish()
+ return;
+ }
+ _tests.shift()();
+}
+
+function assert_equals(actual, expected, description) {
+ ok(typeof actual == typeof expected,
+ `${description} expected (${typeof expected}) ${expected} but got (${typeof actual}) ${actual}`);
+ ok(Object.is(actual, expected),
+ `${description} expected ${expected} but got ${actual}`);
+}
+
+function assert_array_equals(actual, expected, description) {
+ ok(actual.length === expected.length,
+ `${description} lengths differ, expected ${expected.length} but got ${actual.length}`);
+ for (var i = 0; i < actual.length; i++) {
+ ok(actual.hasOwnProperty(i) === expected.hasOwnProperty(i),
+ `${description} property expected to be ${expected[i]} but got ${actual[i]}`);
+ }
+}
+
+function assert_throws(expected_exc, func, desc) {
+ try {
+ func.call(this);
+ } catch(e) {
+ var actual = e.name || e.type;
+ var expected = expected_exc.name || expected_exc.type;
+ ok(actual == expected,
+ `Expected '${expected}', got '${actual}'.`);
+ return;
+ }
+ ok(false, "Expected exception, but none was thrown");
+}
+
+function assert_unreached(description) {
+ ok(false, `${description} reached unreachable code`);
+}
+</script>
+<script src="test_performance_observer.js"></script>
+<script>
+function makeXHR(aUrl) {
+ var xmlhttp = new XMLHttpRequest();
+ xmlhttp.open("get", aUrl, true);
+ xmlhttp.send();
+}
+
+let waitForConsole = new Promise(resolve => {
+ SimpleTest.monitorConsole(resolve, [{
+ message: /JavaScript Warning: "Ignoring unsupported entryTypes: invalid."/,
+ }]);
+});
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearResourceTimings();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({entryTypes: ['resource']});
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ makeXHR("test-data.json");
+
+ return promise.then(async list => {
+ assert_equals(list.getEntries().length, 1);
+ assert_array_equals(list.getEntries(),
+ performance.getEntriesByType("resource"),
+ "Observed 'resource' entries should equal to entries obtained by getEntriesByType.");
+
+ // getEntries filtering tests
+ assert_array_equals(list.getEntries({name: "http://mochi.test:8888/tests/dom/base/test/test-data.json"}),
+ performance.getEntriesByName("http://mochi.test:8888/tests/dom/base/test/test-data.json"),
+ "getEntries with name filter should return correct results.");
+ assert_array_equals(list.getEntries({entryType: "resource"}),
+ performance.getEntriesByType("resource"),
+ "getEntries with entryType filter should return correct results.");
+ assert_array_equals(list.getEntries({initiatorType: "xmlhttprequest"}),
+ performance.getEntriesByType("resource"),
+ "getEntries with initiatorType filter should return correct results.");
+ assert_array_equals(list.getEntries({initiatorType: "link"}),
+ [],
+ "getEntries with non-existent initiatorType filter should return an empty array.");
+
+ SimpleTest.endMonitorConsole();
+ await waitForConsole;
+ });
+}, "resource-timing test");
+
+runNextTest();
+</script>
+</body>
diff --git a/dom/performance/tests/test_performance_observer.js b/dom/performance/tests/test_performance_observer.js
new file mode 100644
index 0000000000..ddc57d4096
--- /dev/null
+++ b/dom/performance/tests/test_performance_observer.js
@@ -0,0 +1,286 @@
+test(t => {
+ assert_throws(
+ { name: "TypeError" },
+ function () {
+ new PerformanceObserver();
+ },
+ "PerformanceObserver constructor should throw TypeError if no argument is specified."
+ );
+
+ assert_throws(
+ { name: "TypeError" },
+ function () {
+ new PerformanceObserver({});
+ },
+ "PerformanceObserver constructor should throw TypeError if the argument is not a function."
+ );
+}, "Test that PerformanceObserver constructor throws exception");
+
+test(t => {
+ var observer = new PerformanceObserver(() => {});
+
+ assert_throws(
+ { name: "TypeError" },
+ function () {
+ observer.observe();
+ },
+ "observe() should throw TypeError exception if no option specified."
+ );
+
+ assert_throws(
+ { name: "TypeError" },
+ function () {
+ observer.observe({ unsupportedAttribute: "unsupported" });
+ },
+ "obsrve() should throw TypeError exception if the option has no 'entryTypes' attribute."
+ );
+
+ assert_equals(
+ undefined,
+ observer.observe({ entryTypes: [] }),
+ "observe() should silently ignore empty 'entryTypes' sequence."
+ );
+
+ assert_throws(
+ { name: "TypeError" },
+ function () {
+ observer.observe({ entryTypes: null });
+ },
+ "obsrve() should throw TypeError exception if 'entryTypes' attribute is null."
+ );
+
+ assert_equals(
+ undefined,
+ observer.observe({ entryTypes: ["invalid"] }),
+ "observe() should silently ignore invalid 'entryTypes' values."
+ );
+}, "Test that PerformanceObserver.observe throws exception");
+
+function promiseObserve(test, options) {
+ return new Promise(resolve => {
+ performance.clearMarks();
+ performance.clearMeasures();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe(options);
+ test.add_cleanup(() => observer.disconnect());
+ });
+}
+
+promise_test(t => {
+ var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] });
+
+ performance.mark("test-start");
+ performance.mark("test-end");
+ performance.measure("test-measure", "test-start", "test-end");
+
+ return promise.then(list => {
+ assert_equals(
+ list.getEntries().length,
+ 3,
+ "There should be three observed entries."
+ );
+
+ var markEntries = list.getEntries().filter(entry => {
+ return entry.entryType == "mark";
+ });
+ assert_array_equals(
+ markEntries,
+ performance.getEntriesByType("mark"),
+ "Observed 'mark' entries should equal to entries obtained by getEntriesByType."
+ );
+
+ var measureEntries = list.getEntries().filter(entry => {
+ return entry.entryType == "measure";
+ });
+ assert_array_equals(
+ measureEntries,
+ performance.getEntriesByType("measure"),
+ "Observed 'measure' entries should equal to entries obtained by getEntriesByType."
+ );
+ });
+}, "Test for user-timing with PerformanceObserver");
+
+promise_test(t => {
+ var promise = new Promise((resolve, reject) => {
+ performance.clearMarks();
+ performance.clearMeasures();
+
+ var observer = new PerformanceObserver(list => reject(list));
+ observer.observe({ entryTypes: ["mark", "measure"] });
+ observer.disconnect();
+ t.step_timeout(resolve, 100);
+ });
+
+ performance.mark("test-start");
+ performance.mark("test-end");
+ performance.measure("test-measure", "test-start", "test-end");
+
+ return promise.then(
+ () => {
+ assert_equals(performance.getEntriesByType("mark").length, 2);
+ assert_equals(performance.getEntriesByType("measure").length, 1);
+ },
+ list => {
+ assert_unreached("Observer callback should never be called.");
+ }
+ );
+}, "Nothing should be notified after disconnecting observer");
+
+promise_test(t => {
+ var promise = promiseObserve(t, { entryTypes: ["mark"] });
+
+ performance.mark("test");
+
+ return promise.then(list => {
+ assert_array_equals(
+ list.getEntries({ entryType: "mark" }),
+ performance.getEntriesByType("mark"),
+ "getEntries with entryType filter should return correct results."
+ );
+
+ assert_array_equals(
+ list.getEntries({ name: "test" }),
+ performance.getEntriesByName("test"),
+ "getEntries with name filter should return correct results."
+ );
+
+ assert_array_equals(
+ list.getEntries({ name: "test", entryType: "mark" }),
+ performance.getEntriesByName("test"),
+ "getEntries with name and entryType filter should return correct results."
+ );
+
+ assert_array_equals(
+ list.getEntries({ name: "invalid" }),
+ [],
+ "getEntries with non-existent name filter should return an empty array."
+ );
+
+ assert_array_equals(
+ list.getEntries({ name: "test", entryType: "measure" }),
+ [],
+ "getEntries with name filter and non-existent entryType should return an empty array."
+ );
+
+ assert_array_equals(
+ list.getEntries({ name: "invalid", entryType: "mark" }),
+ [],
+ "getEntries with non-existent name and entryType filter should return an empty array."
+ );
+
+ assert_array_equals(
+ list.getEntries({ initiatorType: "xmlhttprequest" }),
+ [],
+ "getEntries with initiatorType filter should return an empty array."
+ );
+ });
+}, "Test for PerformanceObserverEntryList.getEntries");
+
+promise_test(t => {
+ var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] });
+
+ performance.mark("test");
+ performance.measure("test-measure", "test", "test");
+
+ return promise.then(list => {
+ assert_array_equals(
+ list.getEntriesByType("mark"),
+ performance.getEntriesByType("mark")
+ );
+ assert_array_equals(
+ list.getEntriesByType("measure"),
+ performance.getEntriesByType("measure")
+ );
+ });
+}, "Test for PerformanceObserverEntryList.getEntriesByType");
+
+promise_test(t => {
+ var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] });
+
+ performance.mark("test");
+ performance.measure("test-measure", "test", "test");
+
+ return promise.then(list => {
+ assert_array_equals(
+ list.getEntriesByName("test"),
+ performance.getEntriesByName("test")
+ );
+ assert_array_equals(
+ list.getEntriesByName("test-measure"),
+ performance.getEntriesByName("test-measure")
+ );
+ });
+}, "Test for PerformanceObserverEntryList.getEntriesByName");
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearMarks();
+ performance.clearMeasures();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({ entryTypes: ["mark", "measure"] });
+ observer.observe({ entryTypes: ["mark", "measure"] });
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ performance.mark("test-start");
+ performance.mark("test-end");
+ performance.measure("test-measure", "test-start", "test-end");
+
+ return promise.then(list => {
+ assert_equals(
+ list.getEntries().length,
+ 3,
+ "Observed user timing entries should have only three entries."
+ );
+ });
+}, "Test that invoking observe method twice affects nothing");
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearMarks();
+ performance.clearMeasures();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({ entryTypes: ["mark", "measure"] });
+ observer.observe({ entryTypes: ["mark"] });
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ performance.mark("test-start");
+ performance.mark("test-end");
+ performance.measure("test-measure", "test-start", "test-end");
+
+ return promise.then(list => {
+ assert_equals(
+ list.getEntries().length,
+ 2,
+ "Observed user timing entries should have only two entries."
+ );
+ });
+}, "Test that observing filter is replaced by a new filter");
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearMarks();
+ performance.clearMeasures();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({ entryTypes: ["mark"] });
+ observer.observe({ entryTypes: ["measure"] });
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ performance.mark("test-start");
+ performance.mark("test-end");
+ performance.measure("test-measure", "test-start", "test-end");
+
+ return promise.then(list => {
+ assert_equals(
+ list.getEntries().length,
+ 1,
+ "Observed user timing entries should have only 1 entries."
+ );
+ });
+}, "Test that observing filter is replaced by a new filter");
diff --git a/dom/performance/tests/test_performance_paint_observer.html b/dom/performance/tests/test_performance_paint_observer.html
new file mode 100644
index 0000000000..2ded1db797
--- /dev/null
+++ b/dom/performance/tests/test_performance_paint_observer.html
@@ -0,0 +1,40 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE HTML>
+<html>
+ <!--
+ https://bugzilla.mozilla.org/show_bug.cgi?id=1518999
+ -->
+ <head>
+ <title>Test for Bug 1518999 (Observer API) </title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
+ </head>
+ <body>
+ <a target="_blank"
+ href="https://bugzilla.mozilla.org/show_bug.cgi?id=1518999">Mozilla
+ Bug 1518999 - Paint Timing API For Observers</a>
+ <p id="display"></p>
+ <div id="content" style="display: none">
+ <pre id="test">
+ <script class="testbody" type="text/javascript">
+ let tab;
+ function runTest() {
+ tab = window.open("test_performance_paint_observer_helper.html");
+ }
+
+ function done() {
+ tab.close();
+ SimpleTest.finish();
+ }
+
+ SimpleTest.waitForExplicitFinish();
+ addLoadEvent(runTest);
+ </script>
+ </pre>
+ </div>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_performance_paint_observer_helper.html b/dom/performance/tests/test_performance_paint_observer_helper.html
new file mode 100644
index 0000000000..ae27c9480d
--- /dev/null
+++ b/dom/performance/tests/test_performance_paint_observer_helper.html
@@ -0,0 +1,35 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE html>
+<html>
+ <body>
+ </body>
+ <script>
+ var promise = new Promise(resolve => {
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({entryTypes: ["paint"]});
+ });
+
+ promise.then(list => {
+ var perfEntries = list.getEntries();
+ opener.is(list.getEntries().length, 1);
+ opener.isDeeply(list.getEntries(),
+ performance.getEntriesByType("paint"),
+ "Observed 'paint' entries should equal to entries obtained by getEntriesByType.");
+ opener.isDeeply(list.getEntries({name: "paint"}),
+ performance.getEntriesByName("paint"),
+ "getEntries with name filter should return correct results.");
+ opener.isDeeply(list.getEntries({entryType: "paint"}),
+ performance.getEntriesByType("paint"),
+ "getEntries with entryType filter should return correct results.");
+ opener.done();
+ });
+
+ const img = document.createElement("IMG");
+ img.src = "http://example.org/tests/dom/performance/tests/logo.png";
+ document.body.appendChild(img);
+
+ </script>
+</html>
diff --git a/dom/performance/tests/test_performance_paint_timing.html b/dom/performance/tests/test_performance_paint_timing.html
new file mode 100644
index 0000000000..f8784ecf26
--- /dev/null
+++ b/dom/performance/tests/test_performance_paint_timing.html
@@ -0,0 +1,38 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE HTML>
+<html>
+ <!--
+ https://bugzilla.mozilla.org/show_bug.cgi?id=1518999
+ -->
+ <head>
+ <title>Test for Bug 1518999</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
+ </head>
+ <body>
+ <a target="_blank"
+ href="https://bugzilla.mozilla.org/show_bug.cgi?id=1518999">Mozilla
+ Bug 1518999 - Paint Timing API</a>
+ <p id="display"></p>
+ <div id="content" style="display: none">
+ <pre id="test">
+ <script class="testbody" type="text/javascript">
+ let tab;
+ function runTest() {
+ tab = window.open("test_performance_paint_timing_helper.html");
+ }
+ function done() {
+ tab.close();
+ SimpleTest.finish();
+ }
+ SimpleTest.waitForExplicitFinish();
+ addLoadEvent(runTest);
+ </script>
+ </pre>
+ </div>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_performance_paint_timing_helper.html b/dom/performance/tests/test_performance_paint_timing_helper.html
new file mode 100644
index 0000000000..c05b38cac0
--- /dev/null
+++ b/dom/performance/tests/test_performance_paint_timing_helper.html
@@ -0,0 +1,65 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE HTML>
+<html>
+ <!--
+ https://bugzilla.mozilla.org/show_bug.cgi?id=1518999
+ -->
+ <head>
+ <title>Test for Bug 1518999</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ </head>
+ <body>
+ <div id="main"></div>
+ <div id="image"></div>
+ <div id="test">
+ <script class="testbody" type="text/javascript">
+ async function runTest() {
+ const paintEntries = performance.getEntriesByType('paint');
+ opener.is(paintEntries.length, 0, "No paint entries yet");
+
+ const img = document.createElement("img");
+ img.src = "http://example.org/tests/dom/performance/tests/logo.png";
+
+ img.onload = function() {
+ function getAndTestEntries(runCount) {
+ function testEntries(entries) {
+ opener.is(entries.length, 1, "FCP Only returns");
+ opener.is(entries[0].entryType, "paint", "entryType is paint");
+ opener.is(entries[0].name, "first-contentful-paint",
+ "Returned entry should be first-contentful-paint" );
+ const fcpEntriesGotByName =
+ performance.getEntriesByName('first-contentful-paint');
+ opener.is(fcpEntriesGotByName.length, 1, "entries length should match");
+ opener.is(entries[0], fcpEntriesGotByName[0], "should be the same entry");
+ opener.done();
+ }
+ const entries = performance.getEntriesByType('paint');
+ if (entries.length < 1) {
+ if (runCount < 4) {
+ opener.SimpleTest.requestFlakyTimeout("FCP is being registered asynchronously, so wait a bit of time");
+ setTimeout(function() {
+ getAndTestEntries(runCount + 1);
+ }, 20);
+ } else {
+ opener.ok(false, "Unable to find paint entries within a reasonable amount of time");
+ opener.done();
+ }
+ } else {
+ testEntries(entries);
+ }
+ }
+ getAndTestEntries(1);
+ }
+ document.body.appendChild(img);
+ }
+ window.onload = function() {
+ runTest();
+ }
+ </script>
+ </div>
+ </div>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_performance_server_timing.html b/dom/performance/tests/test_performance_server_timing.html
new file mode 100644
index 0000000000..cba11a5fdd
--- /dev/null
+++ b/dom/performance/tests/test_performance_server_timing.html
@@ -0,0 +1,58 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset=utf-8>
+<title>Test for PerformanceServerTiming</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+function makeXHR(aUrl) {
+ var xmlhttp = new XMLHttpRequest();
+ xmlhttp.open("get", aUrl, true);
+ xmlhttp.send();
+}
+
+// Note that |responseServerTiming| and |trailerServerTiming| SHOULD be synced with
+// the ones in serverTiming.sjs.
+var responseServerTiming = [{metric:"metric1", duration:"123.4", description:"description1"},
+ {metric:"metric2", duration:"456.78", description:"description2"}];
+var trailerServerTiming = [{metric:"metric3", duration:"789.11", description:"description3"},
+ {metric:"metric4", duration:"1112.13", description:"description4"}];
+
+function checkServerTimingContent(serverTiming) {
+ var expectedResult = responseServerTiming.concat(trailerServerTiming);
+ assert_equals(serverTiming.length, expectedResult.length);
+
+ for (var i = 0; i < expectedResult.length; i++) {
+ assert_equals(serverTiming[i].name, expectedResult[i].metric);
+ assert_equals(serverTiming[i].description, expectedResult[i].description);
+ assert_equals(serverTiming[i].duration, parseFloat(expectedResult[i].duration));
+ }
+}
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearResourceTimings();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({entryTypes: ['resource']});
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ makeXHR("serverTiming.sjs");
+
+ return promise.then(list => {
+ assert_equals(list.getEntries().length, 1);
+ checkServerTimingContent(list.getEntries()[0].serverTiming);
+ });
+}, "server-timing test");
+
+</script>
+</body>
diff --git a/dom/performance/tests/test_performance_server_timing_plain_http.html b/dom/performance/tests/test_performance_server_timing_plain_http.html
new file mode 100644
index 0000000000..7dcb8bd38d
--- /dev/null
+++ b/dom/performance/tests/test_performance_server_timing_plain_http.html
@@ -0,0 +1,42 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset=utf-8>
+<title>Plain HTTP Test for PerformanceServerTiming</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+function makeXHR(aUrl) {
+ var xmlhttp = new XMLHttpRequest();
+ xmlhttp.open("get", aUrl, true);
+ xmlhttp.send();
+}
+
+promise_test(t => {
+ var promise = new Promise(resolve => {
+ performance.clearResourceTimings();
+
+ var observer = new PerformanceObserver(list => resolve(list));
+ observer.observe({entryTypes: ['resource']});
+ t.add_cleanup(() => observer.disconnect());
+ });
+
+ makeXHR("serverTiming.sjs");
+
+ return promise.then(list => {
+ assert_equals(list.getEntries().length, 1);
+ assert_equals(list.getEntries()[0].serverTiming, undefined);
+ assert_equals(list.getEntries()[0].toJSON().serverTiming, undefined,
+ "toJSON should not pick up properties that aren't on the object");
+ });
+}, "server-timing test");
+
+</script>
+</body>
diff --git a/dom/performance/tests/test_performance_timing_json.html b/dom/performance/tests/test_performance_timing_json.html
new file mode 100644
index 0000000000..97079c0d2f
--- /dev/null
+++ b/dom/performance/tests/test_performance_timing_json.html
@@ -0,0 +1,32 @@
+<!DOCTYPE HTML>
+<html>
+<!--
+https://bugzilla.mozilla.org/show_bug.cgi?id=1375829
+-->
+<head>
+ <meta charset="utf-8">
+ <title>Test for Bug 1375829</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
+ <script type="application/javascript">
+
+ /** Test for Bug 1375829 **/
+ var json = performance.timing.toJSON();
+
+ // Ensure it doesn't have any attributes that performance.timing doesn't have
+ for (let key of Object.keys(json)) {
+ ok(key in performance.timing, key + " should be a property of performance.timing");
+ }
+
+ </script>
+</head>
+<body>
+<a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=1375829">Mozilla Bug 1375829</a>
+<p id="display"></p>
+<div id="content" style="display: none">
+
+</div>
+<pre id="test">
+</pre>
+</body>
+</html>
diff --git a/dom/performance/tests/test_performance_user_timing.html b/dom/performance/tests/test_performance_user_timing.html
new file mode 100644
index 0000000000..fa0aaceb4e
--- /dev/null
+++ b/dom/performance/tests/test_performance_user_timing.html
@@ -0,0 +1,49 @@
+<!DOCTYPE HTML>
+<html>
+ <!--
+ https://bugzilla.mozilla.org/show_bug.cgi?id=782751
+ -->
+ <head>
+ <title>Test for Bug 782751</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript" src="test_performance_user_timing.js"></script>
+ </head>
+ <body>
+ <a target="_blank" href="https://bugzilla.mozilla.org/show_bug.cgi?id=782751">Mozilla Bug 782751 - User Timing API</a>
+ <div id="content">
+ </div>
+ <pre id="test">
+ <script class="testbody" type="text/javascript">
+ var index = 0;
+
+ function next() {
+ ok(true, "Begin!");
+ var arr;
+ for (var i = 0; i < steps.length; ++i) {
+ try {
+ performance.clearMarks();
+ performance.clearMeasures();
+ performance.clearResourceTimings();
+ is(performance.getEntriesByType("resource").length, 0, "clearing performance resource entries");
+ is(performance.getEntriesByType("mark").length, 0, "clearing performance mark entries");
+ is(performance.getEntriesByType("measure").length, 0, "clearing performance measure entries");
+ steps[i]();
+ } catch(ex) {
+ ok(false, "Caught exception", ex);
+ }
+ }
+
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+ }
+
+ var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+
+ SimpleTest.waitForExplicitFinish();
+ addLoadEvent(next);
+ </script>
+ </pre>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_performance_user_timing.js b/dom/performance/tests/test_performance_user_timing.js
new file mode 100644
index 0000000000..c98bee6f11
--- /dev/null
+++ b/dom/performance/tests/test_performance_user_timing.js
@@ -0,0 +1,318 @@
+var steps = [
+ // Test single mark addition
+ function () {
+ ok(true, "Running mark addition test");
+ performance.mark("test");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "Number of marks should be 1");
+ var mark = marks[0];
+ is(mark.name, "test", "mark name should be 'test'");
+ is(mark.entryType, "mark", "mark type should be 'mark'");
+ isnot(mark.startTime, 0, "mark start time should not be 0");
+ is(mark.duration, 0, "mark duration should be 0");
+ },
+ // Test multiple mark addition
+ function () {
+ ok(true, "Running multiple mark with same name addition test");
+ performance.mark("test");
+ performance.mark("test");
+ performance.mark("test");
+ var marks_type = performance.getEntriesByType("mark");
+ is(marks_type.length, 3, "Number of marks by type should be 3");
+ var marks_name = performance.getEntriesByName("test");
+ is(marks_name.length, 3, "Number of marks by name should be 3");
+ var mark = marks_name[0];
+ is(mark.name, "test", "mark name should be 'test'");
+ is(mark.entryType, "mark", "mark type should be 'mark'");
+ isnot(mark.startTime, 0, "mark start time should not be 0");
+ is(mark.duration, 0, "mark duration should be 0");
+ var times = [];
+ // This also tests the chronological ordering specified as
+ // required for getEntries in the performance timeline spec.
+ marks_name.forEach(function (s) {
+ times.forEach(function (time) {
+ ok(
+ s.startTime >= time.startTime,
+ "Times should be equal or increasing between similarly named marks: " +
+ s.startTime +
+ " >= " +
+ time.startTime
+ );
+ });
+ times.push(s);
+ });
+ },
+ // Test all marks removal
+ function () {
+ ok(true, "Running all mark removal test");
+ performance.mark("test");
+ performance.mark("test2");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 2, "number of marks before all removal");
+ performance.clearMarks();
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 0, "number of marks after all removal");
+ },
+ // Test single mark removal
+ function () {
+ ok(true, "Running removal test (0 'test' marks with other marks)");
+ performance.mark("test2");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "number of marks after all removal");
+ },
+ // Test single mark removal
+ function () {
+ ok(true, "Running removal test (0 'test' marks with no other marks)");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 0, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 0, "number of marks after all removal");
+ },
+ function () {
+ ok(true, "Running removal test (1 'test' mark with other marks)");
+ performance.mark("test");
+ performance.mark("test2");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 2, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "number of marks after all removal");
+ },
+ function () {
+ ok(true, "Running removal test (1 'test' mark with no other marks)");
+ performance.mark("test");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 0, "number of marks after all removal");
+ },
+ function () {
+ ok(true, "Running removal test (2 'test' marks with other marks)");
+ performance.mark("test");
+ performance.mark("test");
+ performance.mark("test2");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 3, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 1, "number of marks after all removal");
+ },
+ function () {
+ ok(true, "Running removal test (2 'test' marks with no other marks)");
+ performance.mark("test");
+ performance.mark("test");
+ var marks = performance.getEntriesByType("mark");
+ is(marks.length, 2, "number of marks before all removal");
+ performance.clearMarks("test");
+ marks = performance.getEntriesByType("mark");
+ is(marks.length, 0, "number of marks after all removal");
+ },
+ // Test mark name being same as navigation timing parameter
+ function () {
+ ok(true, "Running mark name collision test");
+ for (n in performance.timing) {
+ try {
+ if (n == "toJSON") {
+ ok(true, "Skipping toJSON entry in collision test");
+ continue;
+ }
+ performance.mark(n);
+ ok(
+ false,
+ "Mark name collision test failed for name " +
+ n +
+ ", shouldn't make it here!"
+ );
+ } catch (e) {
+ ok(
+ e instanceof DOMException,
+ "DOM exception thrown for mark named " + n
+ );
+ is(
+ e.code,
+ e.SYNTAX_ERR,
+ "DOM exception for name collision is syntax error"
+ );
+ }
+ }
+ },
+ // Test measure
+ function () {
+ ok(true, "Running measure addition with no start/end time test");
+ performance.measure("test");
+ var measures = performance.getEntriesByType("measure");
+ is(measures.length, 1, "number of measures should be 1");
+ var measure = measures[0];
+ is(measure.name, "test", "measure name should be 'test'");
+ is(measure.entryType, "measure", "measure type should be 'measure'");
+ is(measure.startTime, 0, "measure start time should be zero");
+ ok(measure.duration >= 0, "measure duration should not be negative");
+ },
+ function () {
+ ok(true, "Running measure addition with only start time test");
+ performance.mark("test1");
+ performance.measure("test", "test1", undefined);
+ var measures = performance.getEntriesByName("test", "measure");
+ var marks = performance.getEntriesByName("test1", "mark");
+ var measure = measures[0];
+ var mark = marks[0];
+ is(
+ measure.startTime,
+ mark.startTime,
+ "measure start time should be equal to the mark startTime"
+ );
+ ok(measure.duration >= 0, "measure duration should not be negative");
+ },
+ function () {
+ ok(true, "Running measure addition with only end time test");
+ performance.mark("test1");
+ performance.measure("test", undefined, "test1");
+ var measures = performance.getEntriesByName("test", "measure");
+ var marks = performance.getEntriesByName("test1", "mark");
+ var measure = measures[0];
+ var mark = marks[0];
+ ok(measure.duration >= 0, "measure duration should not be negative");
+ },
+ // Test measure picking latest version of similarly named tags
+ function () {
+ ok(true, "Running multiple mark with same name addition test");
+ performance.mark("test");
+ performance.mark("test");
+ performance.mark("test");
+ performance.mark("test2");
+ var marks_name = performance.getEntriesByName("test");
+ is(marks_name.length, 3, "Number of marks by name should be 3");
+ var marks_name2 = performance.getEntriesByName("test2");
+ is(marks_name2.length, 1, "Number of marks by name should be 1");
+ var test_mark = marks_name2[0];
+ performance.measure("test", "test", "test2");
+ var measures_type = performance.getEntriesByType("measure");
+ var last_mark = marks_name[marks_name.length - 1];
+ is(measures_type.length, 1, "Number of measures by type should be 1");
+ var measure = measures_type[0];
+ is(
+ measure.startTime,
+ last_mark.startTime,
+ "Measure start time should be the start time of the latest 'test' mark"
+ );
+ // Tolerance testing to avoid oranges, since we're doing double math across two different languages.
+ ok(
+ measure.duration - (test_mark.startTime - last_mark.startTime) < 0.00001,
+ "Measure duration ( " +
+ measure.duration +
+ ") should be difference between two marks"
+ );
+ },
+ function () {
+ // We don't have navigationStart in workers.
+ if ("window" in self) {
+ ok(true, "Running measure addition with no start/end time test");
+ performance.measure("test", "navigationStart");
+ var measures = performance.getEntriesByType("measure");
+ is(measures.length, 1, "number of measures should be 1");
+ var measure = measures[0];
+ is(measure.name, "test", "measure name should be 'test'");
+ is(measure.entryType, "measure", "measure type should be 'measure'");
+ is(measure.startTime, 0, "measure start time should be zero");
+ ok(measure.duration >= 0, "measure duration should not be negative");
+ }
+ },
+ // Test all measure removal
+ function () {
+ ok(true, "Running all measure removal test");
+ performance.measure("test");
+ performance.measure("test2");
+ var measures = performance.getEntriesByType("measure");
+ is(measures.length, 2, "measure entries should be length 2");
+ performance.clearMeasures();
+ measures = performance.getEntriesByType("measure");
+ is(measures.length, 0, "measure entries should be length 0");
+ },
+ // Test single measure removal
+ function () {
+ ok(true, "Running all measure removal test");
+ performance.measure("test");
+ performance.measure("test2");
+ var measures = performance.getEntriesByType("measure");
+ is(measures.length, 2, "measure entries should be length 2");
+ performance.clearMeasures("test");
+ measures = performance.getEntriesByType("measure");
+ is(measures.length, 1, "measure entries should be length 1");
+ },
+ // Test measure with invalid start time mark name
+ function () {
+ ok(true, "Running measure invalid start test");
+ try {
+ performance.measure("test", "notamark");
+ ok(false, "invalid measure start time exception not thrown!");
+ } catch (e) {
+ ok(e instanceof DOMException, "DOM exception thrown for invalid measure");
+ is(
+ e.code,
+ e.SYNTAX_ERR,
+ "DOM exception for invalid time is syntax error"
+ );
+ }
+ },
+ // Test measure with invalid end time mark name
+ function () {
+ ok(true, "Running measure invalid end test");
+ try {
+ performance.measure("test", undefined, "notamark");
+ ok(false, "invalid measure end time exception not thrown!");
+ } catch (e) {
+ ok(e instanceof DOMException, "DOM exception thrown for invalid measure");
+ is(
+ e.code,
+ e.SYNTAX_ERR,
+ "DOM exception for invalid time is syntax error"
+ );
+ }
+ },
+ // Test measure name being same as navigation timing parameter
+ function () {
+ ok(true, "Running measure name collision test");
+ for (n in performance.timing) {
+ if (n == "toJSON") {
+ ok(true, "Skipping toJSON entry in collision test");
+ continue;
+ }
+ performance.measure(n);
+ ok(true, "Measure name supports name collisions: " + n);
+ }
+ },
+ // Test measure mark being a reserved name
+ function () {
+ ok(true, "Create measures using all reserved names");
+ for (n in performance.timing) {
+ try {
+ if (n == "toJSON") {
+ ok(true, "Skipping toJSON entry in collision test");
+ continue;
+ }
+ performance.measure("test", n);
+ ok(true, "Measure created from reserved name as starting time: " + n);
+ } catch (e) {
+ ok(
+ [
+ "redirectStart",
+ "redirectEnd",
+ "unloadEventStart",
+ "unloadEventEnd",
+ "loadEventEnd",
+ "secureConnectionStart",
+ ].includes(n),
+ "Measure created from reserved name as starting time: " +
+ n +
+ " and threw expected error"
+ );
+ }
+ }
+ },
+ // TODO: Test measure picking latest version of similarly named tags
+];
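The steps array above is shared between scopes: test_performance_user_timing.html includes it with a <script src> in the window, and sharedworker_performance_user_timing.js pulls it in with importScripts(), supplying ok/is/isnot implementations that forward results over the shared worker's port. A host only needs to provide those three reporters and then drive the steps, roughly (sketch mirroring the harnesses in this diff):

    importScripts("test_performance_user_timing.js"); // defines `steps`
    for (const step of steps) {
      performance.clearMarks();     // each step assumes a clean user-timing buffer
      performance.clearMeasures();
      step();
    }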
diff --git a/dom/performance/tests/test_performance_user_timing_dying_global.html b/dom/performance/tests/test_performance_user_timing_dying_global.html
new file mode 100644
index 0000000000..18e4a54684
--- /dev/null
+++ b/dom/performance/tests/test_performance_user_timing_dying_global.html
@@ -0,0 +1,61 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>Test for User Timing APIs on dying globals</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="text/javascript">
+ // We must wait for the iframe to load.
+ SimpleTest.waitForExplicitFinish();
+ window.addEventListener('load', () => {
+ const dyingWindow = initTest();
+ ok(true, 'Initialization complete');
+
+ testDoesNotCrash(dyingWindow);
+ SimpleTest.finish();
+ });
+
+ function initTest() {
+ // We create a dying global by creating an iframe, keeping a
+ // reference to it, and removing it.
+ const iframe = document.querySelector('iframe');
+ const iframeWindow = iframe.contentWindow;
+
+ // We want to call the User Timing functions in the context of
+ // the dying global. However, we can't call constructors
+ // directly on a reference to a window so we have to wrap it.
+ iframeWindow.newPerformanceMark = () => {
+ new PerformanceMark('constructor', {detail: 'constructorDetail'});
+ };
+
+ // Send the global to a dying state.
+ iframe.remove();
+
+ return iframeWindow;
+ }
+
+ function testDoesNotCrash(dyingWindow) {
+ ok(true, 'Running testDoesNotCrash');
+
+ dyingWindow.newPerformanceMark();
+ ok(true, 'new PerformanceMark() on dying global did not crash');
+
+ try {
+ dyingWindow.performance.mark('markMethod', {detail: 'markMethodDetail'});
+ } catch (e) {
+ is(e.code, e.INVALID_STATE_ERR, 'performance.mark on dying global threw expected exception');
+ }
+ ok(true, 'performance.mark on dying global did not crash');
+
+ try {
+ dyingWindow.performance.measure('measureMethod');
+ } catch (e) {
+ is(e.code, e.INVALID_STATE_ERR, 'performance.measure on dying global threw expected exception');
+ }
+ ok(true, 'performance.measure on dying global did not crash');
+ }
+ </script>
+ </head>
+ <body>
+ <iframe width="200" height="200" src="about:blank"></iframe>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_sharedWorker_performance_user_timing.html b/dom/performance/tests/test_sharedWorker_performance_user_timing.html
new file mode 100644
index 0000000000..d26594e292
--- /dev/null
+++ b/dom/performance/tests/test_sharedWorker_performance_user_timing.html
@@ -0,0 +1,30 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE HTML>
+<html>
+ <head>
+ <title>Test for worker performance timing API</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ </head>
+ <body>
+ <script class="testbody" type="text/javascript">
+
+var sw = new SharedWorker('sharedworker_performance_user_timing.js');
+sw.port.onmessage = function(event) {
+ if (event.data.type == 'finish') {
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+ } else if (event.data.type == 'status') {
+ ok(event.data.status, event.data.msg);
+ }
+}
+
+var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+SimpleTest.waitForExplicitFinish();
+ </script>
+ </body>
+</html>
diff --git a/dom/performance/tests/test_timeOrigin.html b/dom/performance/tests/test_timeOrigin.html
new file mode 100644
index 0000000000..69796a432d
--- /dev/null
+++ b/dom/performance/tests/test_timeOrigin.html
@@ -0,0 +1,76 @@
+<!DOCTYPE HTML>
+<html>
+ <head>
+ <title>Test for performance.timeOrigin</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ </head>
+ <body>
+ <script type="text/js-worker" id="worker-src">
+ postMessage({ now: performance.now(), timeOrigin: performance.timeOrigin });
+ </script>
+
+ <script type="text/js-worker" id="shared-worker-src">
+ onconnect = function(evt) {
+ evt.ports[0].postMessage({ now: performance.now(), timeOrigin: performance.timeOrigin });
+ };
+ </script>
+
+ <script class="testbody" type="text/javascript">
+
+function testBasic() {
+ ok("timeOrigin" in performance, "Performance.timeOrigin exists.");
+ ok(performance.timeOrigin > 0, "TimeOrigin must be greater than 0.");
+ next();
+}
+
+function testWorker() {
+ var now = performance.now();
+
+ var blob = new Blob([ document.getElementById("worker-src").textContent ],
+ { type: "text/javascript" });
+ var w = new Worker(URL.createObjectURL(blob));
+ w.onmessage = function(e) {
+ ok (e.data.now + e.data.timeOrigin > now + performance.timeOrigin, "Comparing worker.now and window.now");
+ next();
+ }
+}
+
+function testSharedWorker() {
+ var now = performance.now();
+
+ var blob = new Blob([ document.getElementById("shared-worker-src").textContent ],
+ { type: "text/javascript" });
+ var w = new SharedWorker(URL.createObjectURL(blob));
+ w.port.onmessage = function(e) {
+ ok (e.data.now + e.data.timeOrigin > now + performance.timeOrigin, "Comparing worker.now and window.now");
+ next();
+ }
+}
+
+var tests = [ testBasic, testWorker, testSharedWorker ];
+function next() {
+ if (!tests.length) {
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+ return;
+ }
+
+ var test = tests.shift();
+ test();
+}
+
+SimpleTest.waitForExplicitFinish();
+
+// It is a known issue that comparing time between a worker and a window
+// when timer clamping is in effect may cause time to go backwards.
+// Do not run this test with this preference set. For large values of
+// clamping you will see failures. For small values, it is intermittent.
+var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+
+addLoadEvent(next);
+ </script>
+ </pre>
+ </body>
+</html>
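The worker comparisons in testWorker() and testSharedWorker() rest on the invariant that performance.timeOrigin + performance.now() approximates the current absolute time in any context, so a sample taken later inside the worker should be larger than the window's earlier sample (restated from the assertions above, illustrative only):

    const windowAbsolute = performance.timeOrigin + now;      // sampled before the worker starts
    const workerAbsolute = e.data.timeOrigin + e.data.now;    // sampled later, inside the worker
    // expected: workerAbsolute > windowAbsolute
    // Timer clamping (privacy.reduceTimerPrecision) can break this ordering,
    // which is why the test disables the pref before running.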
diff --git a/dom/performance/tests/test_worker_observer.html b/dom/performance/tests/test_worker_observer.html
new file mode 100644
index 0000000000..7f4df855c9
--- /dev/null
+++ b/dom/performance/tests/test_worker_observer.html
@@ -0,0 +1,41 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE html>
+<html>
+<head>
+<meta charset=utf-8>
+<title>Test for performance observer in worker</title>
+<script src="/resources/testharness.js"></script>
+<script src="/resources/testharnessreport.js"></script>
+</head>
+<body>
+<div id="log"></div>
+<script>
+const worker = new Worker("worker_performance_observer.js");
+
+promise_test(t => {
+ let found = false;
+ return new Promise(resolve => {
+ SpecialPowers.registerConsoleListener(msg => {
+ if (msg.errorMessage === "Ignoring unsupported entryTypes: invalid.") {
+ found = true;
+ resolve();
+ }
+ });
+ worker.addEventListener("error", resolve);
+ worker.addEventListener("message", function(event) {
+ if (event.data.type === "complete") {
+ resolve();
+ }
+ });
+ }).then(() => {
+ SpecialPowers.postConsoleSentinel();
+ assert_true(found, "got the expected console warning");
+ });
+}, "Console warnings about invalid types should be logged during the tests");
+
+fetch_tests_from_worker(worker);
+</script>
+</body>
diff --git a/dom/performance/tests/test_worker_performance_entries.html b/dom/performance/tests/test_worker_performance_entries.html
new file mode 100644
index 0000000000..d3f124fdb3
--- /dev/null
+++ b/dom/performance/tests/test_worker_performance_entries.html
@@ -0,0 +1,39 @@
+<!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+<!DOCTYPE HTML>
+<html>
+<head>
+ <title>PerformanceResourceTiming in workers</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+// The worker assumes it will take some amount of time to load a resource.
+// With a low enough precision, the duration to load a resource may clamp
+// down to zero.
+var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+
+var worker = new Worker('test_worker_performance_entries.js');
+worker.onmessage = function(event) {
+ if (event.data.type == "check") {
+ ok(event.data.status, event.data.msg);
+ return;
+ }
+
+ if (event.data.type == "finish") {
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+ return;
+ }
+
+  ok(false, "Unexpected message type: " + event.data.type);
+}
+
+</script>
+</body>
+</html>
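
As the comment above says, coarse timer clamping can round a fast resource load down to a zero duration, which would break the worker's duration > 0 check. A rough illustration of the effect, assuming a hypothetical 100 ms clamp:

    // Both timestamps land in the same 100 ms bucket, so duration becomes 0.
    const clamp = (ms, precision) => Math.floor(ms / precision) * precision;
    const startTime = clamp(101.2, 100);   // 100
    const responseEnd = clamp(103.4, 100); // 100
    console.log(responseEnd - startTime);  // 0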
diff --git a/dom/performance/tests/test_worker_performance_entries.js b/dom/performance/tests/test_worker_performance_entries.js
new file mode 100644
index 0000000000..e3d660bd85
--- /dev/null
+++ b/dom/performance/tests/test_worker_performance_entries.js
@@ -0,0 +1,120 @@
+function ok(a, msg) {
+ postMessage({ type: "check", status: !!a, msg });
+}
+
+function is(a, b, msg) {
+ ok(a === b, msg);
+}
+
+function finish() {
+  postMessage({ type: "finish" });
+}
+
+async function wait_for_performance_entries() {
+ let promise = new Promise(resolve => {
+ new PerformanceObserver(list => {
+ resolve(list.getEntries());
+ }).observe({ entryTypes: ["resource"] });
+ });
+  let entries = await promise;
+  return entries;
+}
+
+async function check(resource, initiatorType, protocol) {
+ let entries = performance.getEntries();
+ if (!entries.length) {
+ entries = await wait_for_performance_entries();
+ }
+ ok(entries.length == 1, "We have an entry");
+
+ ok(entries[0] instanceof PerformanceEntry, "The entry is a PerformanceEntry");
+ ok(entries[0].name.endsWith(resource), "The entry has been found!");
+
+ is(entries[0].entryType, "resource", "Correct EntryType");
+ ok(entries[0].startTime > 0, "We have a startTime");
+ ok(entries[0].duration > 0, "We have a duration");
+
+ ok(
+ entries[0] instanceof PerformanceResourceTiming,
+ "The entry is a PerformanceResourceTiming"
+ );
+
+ is(entries[0].initiatorType, initiatorType, "Correct initiatorType");
+ is(entries[0].nextHopProtocol, protocol, "Correct protocol");
+
+ performance.clearResourceTimings();
+}
+
+function simple_checks() {
+ ok("performance" in self, "We have self.performance");
+ performance.clearResourceTimings();
+ next();
+}
+
+function fetch_request() {
+ fetch("test_worker_performance_entries.sjs")
+ .then(r => r.blob())
+ .then(blob => {
+ check("test_worker_performance_entries.sjs", "fetch", "http/1.1");
+ })
+ .then(next);
+}
+
+function xhr_request() {
+ let xhr = new XMLHttpRequest();
+ xhr.open("GET", "test_worker_performance_entries.sjs");
+ xhr.send();
+ xhr.onload = () => {
+ check("test_worker_performance_entries.sjs", "xmlhttprequest", "http/1.1");
+ next();
+ };
+}
+
+function sync_xhr_request() {
+ let xhr = new XMLHttpRequest();
+ xhr.open("GET", "test_worker_performance_entries.sjs", false);
+ xhr.send();
+ check("test_worker_performance_entries.sjs", "xmlhttprequest", "http/1.1");
+ next();
+}
+
+function import_script() {
+  importScripts("empty.js");
+ check("empty.js", "other", "http/1.1");
+ next();
+}
+
+function redirect() {
+ fetch("test_worker_performance_entries.sjs?redirect")
+ .then(r => r.text())
+ .then(async text => {
+ is(text, "Hello world \\o/", "The redirect worked correctly");
+ await check(
+ "test_worker_performance_entries.sjs?redirect",
+ "fetch",
+ "http/1.1"
+ );
+ })
+ .then(next);
+}
+
+let tests = [
+ simple_checks,
+ fetch_request,
+ xhr_request,
+ sync_xhr_request,
+ import_script,
+ redirect,
+];
+
+function next() {
+ if (!tests.length) {
+ finish();
+ return;
+ }
+
+ let test = tests.shift();
+ test();
+}
+
+next();
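
check() above first looks at performance.getEntries() and only falls back to an observer if the entry has not landed yet. A sketch of an equivalent pattern that avoids the two-step lookup by using the observer's buffered flag, which replays entries recorded before observe() was called (not what the patch does, just an alternative):

    function waitForResourceEntry() {
      return new Promise(resolve => {
        new PerformanceObserver((list, obs) => {
          obs.disconnect();
          resolve(list.getEntries()[0]);
        }).observe({ type: "resource", buffered: true });
      });
    }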
diff --git a/dom/performance/tests/test_worker_performance_entries.sjs b/dom/performance/tests/test_worker_performance_entries.sjs
new file mode 100644
index 0000000000..62f00c22bc
--- /dev/null
+++ b/dom/performance/tests/test_worker_performance_entries.sjs
@@ -0,0 +1,11 @@
+function handleRequest(request, response) {
+ response.setHeader("Content-Type", "text/html");
+
+ if (request.queryString == "redirect") {
+    response.setStatusLine(request.httpVersion, 302, "Found");
+ response.setHeader("Location", "test_worker_performance_entries.sjs?ok");
+ } else {
+ response.setStatusLine(request.httpVersion, 200, "OK");
+ response.write("Hello world \\o/");
+ }
+}
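
For the same-origin 302 above, the worker's resource entry is recorded under the original ?redirect URL while the body comes from the ?ok response; redirectStart/redirectEnd bracket the hop and stay 0 for non-redirected loads. An illustrative check along those lines (a sketch only, not part of the test):

    fetch("test_worker_performance_entries.sjs?redirect")
      .then(r => r.text())
      .then(() => {
        const entries = performance.getEntriesByType("resource");
        const entry = entries[entries.length - 1];
        console.log(entry.name.endsWith("?redirect"));         // original URL is kept
        console.log(entry.redirectEnd >= entry.redirectStart); // hop is bracketed
      });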
diff --git a/dom/performance/tests/test_worker_performance_now.html b/dom/performance/tests/test_worker_performance_now.html
new file mode 100644
index 0000000000..be4f8f56ea
--- /dev/null
+++ b/dom/performance/tests/test_worker_performance_now.html
@@ -0,0 +1,31 @@
+<!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test for performance.now() in workers</title>
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+
+var worker = new Worker('test_worker_performance_now.js');
+worker.onmessage = function(event) {
+ if (event.data.type == 'finish') {
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+
+ } else if (event.data.type == 'status') {
+ ok(event.data.status, event.data.msg);
+ }
+}
+
+</script>
+</body>
+</html>
diff --git a/dom/performance/tests/test_worker_performance_now.js b/dom/performance/tests/test_worker_performance_now.js
new file mode 100644
index 0000000000..a22f66256e
--- /dev/null
+++ b/dom/performance/tests/test_worker_performance_now.js
@@ -0,0 +1,68 @@
+function ok(a, msg) {
+ dump("OK: " + !!a + " => " + a + ": " + msg + "\n");
+ postMessage({ type: "status", status: !!a, msg: a + ": " + msg });
+}
+
+function workerTestDone() {
+ postMessage({ type: "finish" });
+}
+
+ok(self.performance, "Performance object should exist.");
+ok(
+ typeof self.performance.now == "function",
+ "Performance object should have a 'now' method."
+);
+var n = self.performance.now(),
+ d = Date.now();
+ok(n >= 0, "The value of now() should be equal to or greater than 0.");
+ok(
+ self.performance.now() >= n,
+ "The value of now() should monotonically increase."
+);
+
+// Spin on setTimeout() until performance.now() increases. Due to recent
+// security developments, the hr-time working group has not yet reached
+// consensus on what the recommended minimum clock resolution should be:
+// https://w3c.github.io/hr-time/#clock-resolution
+// Since setTimeout might return too early/late, our goal is for
+// performance.now() to increase before a 2 ms deadline rather than after a
+// specific number of setTimeout(N) invocations.
+// See bug 749894 (intermittent failures of this test)
+setTimeout(checkAfterTimeout, 1);
+
+var checks = 0;
+
+function checkAfterTimeout() {
+ checks++;
+ var d2 = Date.now();
+ var n2 = self.performance.now();
+
+ // Spin on setTimeout() until performance.now() increases. Abort the test
+ // if it runs for more than 2 ms or 50 timeouts.
+ let elapsedTime = d2 - d;
+ let elapsedPerf = n2 - n;
+ if (elapsedPerf == 0 && elapsedTime < 2 && checks < 50) {
+ setTimeout(checkAfterTimeout, 1);
+ return;
+ }
+
+ // Our implementation provides 1 ms resolution (bug 1451790), but we
+ // can't assert that elapsedPerf >= 1 ms because this worker test runs with
+ // "privacy.reduceTimerPrecision" == false so performance.now() is not
+ // limited to 1 ms resolution.
+ ok(
+ elapsedPerf > 0,
+ `Loose - the value of now() should increase after 2ms. ` +
+ `delta now(): ${elapsedPerf} ms`
+ );
+
+ // If we need more than 1 iteration, then either performance.now() resolution
+ // is shorter than 1 ms or setTimeout() is returning too early.
+ ok(
+ checks == 1,
+ `Strict - the value of now() should increase after one setTimeout. ` +
+ `iters: ${checks}, dt: ${elapsedTime}, now(): ${n2}`
+ );
+
+ workerTestDone();
+}
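
The polling comments above boil down to: retry on setTimeout until performance.now() advances, and give up after a small wall-clock budget so the test cannot spin forever. A compact promise-based sketch of that same pattern (illustrative, not part of the patch):

    function waitForNowToAdvance(deadlineMs = 2) {
      const perfStart = performance.now();
      const wallStart = Date.now();
      return new Promise((resolve, reject) => {
        (function poll() {
          const delta = performance.now() - perfStart;
          if (delta > 0) {
            resolve(delta); // the clock moved; report how far
          } else if (Date.now() - wallStart >= deadlineMs) {
            reject(new Error(`now() did not advance within ${deadlineMs} ms`));
          } else {
            setTimeout(poll, 1);
          }
        })();
      });
    }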
diff --git a/dom/performance/tests/test_worker_user_timing.html b/dom/performance/tests/test_worker_user_timing.html
new file mode 100644
index 0000000000..ebeac24e4f
--- /dev/null
+++ b/dom/performance/tests/test_worker_user_timing.html
@@ -0,0 +1,30 @@
+<!--
+ Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/
+-->
+<!DOCTYPE HTML>
+<html>
+ <head>
+ <title>Test for worker performance timing API</title>
+ <meta http-equiv="content-type" content="text/html; charset=UTF-8">
+ <script src="/tests/SimpleTest/SimpleTest.js"></script>
+ </head>
+ <body>
+ <script class="testbody" type="text/javascript">
+
+var worker = new Worker('worker_performance_user_timing.js');
+worker.onmessage = function(event) {
+ if (event.data.type == 'finish') {
+ SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", reduceTimePrecisionPrevPrefValue);
+ SimpleTest.finish();
+ } else if (event.data.type == 'status') {
+ ok(event.data.status, event.data.msg);
+ }
+}
+
+var reduceTimePrecisionPrevPrefValue = SpecialPowers.getBoolPref("privacy.reduceTimerPrecision");
+SpecialPowers.setBoolPref("privacy.reduceTimerPrecision", false);
+SimpleTest.waitForExplicitFinish();
+ </script>
+ </body>
+</html>
diff --git a/dom/performance/tests/worker_performance_observer.js b/dom/performance/tests/worker_performance_observer.js
new file mode 100644
index 0000000000..3282c9d157
--- /dev/null
+++ b/dom/performance/tests/worker_performance_observer.js
@@ -0,0 +1,4 @@
+importScripts("/resources/testharness.js");
+importScripts("test_performance_observer.js");
+
+done();
diff --git a/dom/performance/tests/worker_performance_user_timing.js b/dom/performance/tests/worker_performance_user_timing.js
new file mode 100644
index 0000000000..257040f09f
--- /dev/null
+++ b/dom/performance/tests/worker_performance_user_timing.js
@@ -0,0 +1,32 @@
+function ok(a, msg) {
+ dump("OK: " + !!a + " => " + a + " " + msg + "\n");
+ postMessage({ type: "status", status: !!a, msg: a + ": " + msg });
+}
+
+function is(a, b, msg) {
+ dump("IS: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+ postMessage({
+ type: "status",
+ status: a === b,
+ msg: a + " === " + b + ": " + msg,
+ });
+}
+
+function isnot(a, b, msg) {
+ dump("ISNOT: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+ postMessage({
+ type: "status",
+ status: a != b,
+ msg: a + " != " + b + ": " + msg,
+ });
+}
+
+importScripts("test_performance_user_timing.js");
+
+for (var i = 0; i < steps.length; ++i) {
+ performance.clearMarks();
+ performance.clearMeasures();
+ steps[i]();
+}
+
+postMessage({ type: "finish" });
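
The loop above resets User Timing state between the shared steps imported from test_performance_user_timing.js. A representative step in that style, shown here only as a sketch of the mark/measure calls being exercised (the real steps live in the imported file):

    function exampleStep() {
      performance.mark("start");
      for (let i = 0; i < 100000; ++i) {} // busy-work so the measure has a duration
      performance.mark("stop");
      performance.measure("interval", "start", "stop");
      const [measure] = performance.getEntriesByName("interval", "measure");
      ok(measure && measure.duration >= 0, "measure has a non-negative duration");
    }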