From 43a97878ce14b72f0981164f87f2e35e14151312 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 7 Apr 2024 11:22:09 +0200
Subject: Adding upstream version 110.0.1.

Signed-off-by: Daniel Baumann
---
 dom/performance/tests/empty.js | 1 +
 dom/performance/tests/logo.png | Bin 0 -> 21901 bytes
 dom/performance/tests/mochitest.ini | 38 +++
 dom/performance/tests/serverTiming.sjs | 41 +++
 .../tests/sharedworker_performance_user_timing.js | 38 +++
 .../tests/test_performance_navigation_timing.html | 104 +++++++
 .../tests/test_performance_observer.html | 142 +++++++++
 dom/performance/tests/test_performance_observer.js | 286 ++++++++++++++++++
 .../tests/test_performance_paint_observer.html | 40 +++
 .../test_performance_paint_observer_helper.html | 35 +++
 .../tests/test_performance_paint_timing.html | 38 +++
 .../test_performance_paint_timing_helper.html | 65 +++++
 .../tests/test_performance_server_timing.html | 58 ++++
 .../test_performance_server_timing_plain_http.html | 42 +++
 .../tests/test_performance_timing_json.html | 32 +++
 .../tests/test_performance_user_timing.html | 49 ++++
 .../tests/test_performance_user_timing.js | 318 +++++++++++++++++++++
 .../test_performance_user_timing_dying_global.html | 61 ++++
 .../test_sharedWorker_performance_user_timing.html | 30 ++
 dom/performance/tests/test_timeOrigin.html | 76 +++++
 dom/performance/tests/test_worker_observer.html | 41 +++
 .../tests/test_worker_performance_entries.html | 39 +++
 .../tests/test_worker_performance_entries.js | 107 +++++++
 .../tests/test_worker_performance_entries.sjs | 11 +
 .../tests/test_worker_performance_now.html | 31 ++
 .../tests/test_worker_performance_now.js | 68 +++++
 dom/performance/tests/test_worker_user_timing.html | 30 ++
 .../tests/worker_performance_observer.js | 4 +
 .../tests/worker_performance_user_timing.js | 32 +++
 29 files changed, 1857 insertions(+)
 create mode 100644 dom/performance/tests/empty.js
 create mode 100644 dom/performance/tests/logo.png
 create mode 100644 dom/performance/tests/mochitest.ini
 create mode 100644 dom/performance/tests/serverTiming.sjs
 create mode 100644 dom/performance/tests/sharedworker_performance_user_timing.js
 create mode 100644 dom/performance/tests/test_performance_navigation_timing.html
 create mode 100644 dom/performance/tests/test_performance_observer.html
 create mode 100644 dom/performance/tests/test_performance_observer.js
 create mode 100644 dom/performance/tests/test_performance_paint_observer.html
 create mode 100644 dom/performance/tests/test_performance_paint_observer_helper.html
 create mode 100644 dom/performance/tests/test_performance_paint_timing.html
 create mode 100644 dom/performance/tests/test_performance_paint_timing_helper.html
 create mode 100644 dom/performance/tests/test_performance_server_timing.html
 create mode 100644 dom/performance/tests/test_performance_server_timing_plain_http.html
 create mode 100644 dom/performance/tests/test_performance_timing_json.html
 create mode 100644 dom/performance/tests/test_performance_user_timing.html
 create mode 100644 dom/performance/tests/test_performance_user_timing.js
 create mode 100644 dom/performance/tests/test_performance_user_timing_dying_global.html
 create mode 100644 dom/performance/tests/test_sharedWorker_performance_user_timing.html
 create mode 100644 dom/performance/tests/test_timeOrigin.html
 create mode 100644 dom/performance/tests/test_worker_observer.html
 create mode 100644 dom/performance/tests/test_worker_performance_entries.html
 create mode 100644 dom/performance/tests/test_worker_performance_entries.js
 create mode 100644 dom/performance/tests/test_worker_performance_entries.sjs
 create mode 100644 dom/performance/tests/test_worker_performance_now.html
 create mode 100644 dom/performance/tests/test_worker_performance_now.js
 create mode 100644 dom/performance/tests/test_worker_user_timing.html
 create mode 100644 dom/performance/tests/worker_performance_observer.js
 create mode 100644 dom/performance/tests/worker_performance_user_timing.js

(limited to 'dom/performance/tests')

diff --git a/dom/performance/tests/empty.js b/dom/performance/tests/empty.js
new file mode 100644
index 0000000000..3b44754e30
--- /dev/null
+++ b/dom/performance/tests/empty.js
@@ -0,0 +1 @@
+/* Nothing here */
diff --git a/dom/performance/tests/logo.png b/dom/performance/tests/logo.png
new file mode 100644
index 0000000000..a05926bcd7
Binary files /dev/null and b/dom/performance/tests/logo.png differ
diff --git a/dom/performance/tests/mochitest.ini b/dom/performance/tests/mochitest.ini
new file mode 100644
index 0000000000..59cc878554
--- /dev/null
+++ b/dom/performance/tests/mochitest.ini
@@ -0,0 +1,38 @@
+[DEFAULT]
+support-files =
+  test_performance_observer.js
+  test_performance_user_timing.js
+  test_worker_performance_now.js
+  worker_performance_user_timing.js
+  worker_performance_observer.js
+  sharedworker_performance_user_timing.js
+  test_worker_performance_entries.js
+  test_worker_performance_entries.sjs
+  empty.js
+  serverTiming.sjs
+
+[test_performance_observer.html]
+[test_performance_user_timing.html]
+[test_performance_user_timing_dying_global.html]
+[test_performance_navigation_timing.html]
+[test_performance_paint_timing.html]
+support-files =
+  test_performance_paint_timing_helper.html
+  logo.png
+[test_performance_paint_observer.html]
+support-files =
+  test_performance_paint_observer_helper.html
+  logo.png
+[test_worker_user_timing.html]
+[test_worker_observer.html]
+[test_sharedWorker_performance_user_timing.html]
+skip-if = true # Bug 1571904
+[test_worker_performance_now.html]
+[test_timeOrigin.html]
+skip-if = toolkit == 'android' && !is_fennec # Bug 1525959
+[test_worker_performance_entries.html]
+skip-if = toolkit == 'android' && !is_fennec # Bug 1525959
+[test_performance_timing_json.html]
+[test_performance_server_timing.html]
+scheme = https
+[test_performance_server_timing_plain_http.html]
diff --git a/dom/performance/tests/serverTiming.sjs b/dom/performance/tests/serverTiming.sjs
new file mode 100644
index 0000000000..8a93829fa5
--- /dev/null
+++ b/dom/performance/tests/serverTiming.sjs
@@ -0,0 +1,41 @@
+var responseServerTiming = [
+  { metric: "metric1", duration: "123.4", description: "description1" },
+  { metric: "metric2", duration: "456.78", description: "description2" },
+];
+var trailerServerTiming = [
+  { metric: "metric3", duration: "789.11", description: "description3" },
+  { metric: "metric4", duration: "1112.13", description: "description4" },
+];
+
+function createServerTimingHeader(headerData) {
+  var header = "";
+  for (var i = 0; i < headerData.length; i++) {
+    header +=
+      "Server-Timing:" +
+      headerData[i].metric +
+      ";" +
+      "dur=" +
+      headerData[i].duration +
+      ";" +
+      "desc=" +
+      headerData[i].description +
+      "\r\n";
+  }
+  return header;
+}
+
+function handleRequest(request, response) {
+  var body = "c\r\ndata reached\r\n3\r\nhej\r\n0\r\n";
+
+  response.seizePower();
+  response.write("HTTP/1.1 200 OK\r\n");
+  response.write("Content-Type: text/plain\r\n");
+  response.write(createServerTimingHeader(responseServerTiming));
+
+  response.write("Transfer-Encoding: chunked\r\n");
+  response.write("\r\n");
+  response.write(body);
+  response.write(createServerTimingHeader(trailerServerTiming));
+  response.write("\r\n");
+  response.finish();
+}
diff --git a/dom/performance/tests/sharedworker_performance_user_timing.js b/dom/performance/tests/sharedworker_performance_user_timing.js
new file mode 100644
index 0000000000..a5838c41f0
--- /dev/null
+++ b/dom/performance/tests/sharedworker_performance_user_timing.js
@@ -0,0 +1,38 @@
+var port;
+
+function ok(a, msg) {
+  dump("OK: " + !!a + " => " + a + " " + msg + "\n");
+  port.postMessage({ type: "status", status: !!a, msg: a + ": " + msg });
+}
+
+function is(a, b, msg) {
+  dump("IS: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+  port.postMessage({
+    type: "status",
+    status: a === b,
+    msg: a + " === " + b + ": " + msg,
+  });
+}
+
+function isnot(a, b, msg) {
+  dump("ISNOT: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n");
+  port.postMessage({
+    type: "status",
+    status: a != b,
+    msg: a + " != " + b + ": " + msg,
+  });
+}
+
+importScripts("test_performance_user_timing.js");
+
+onconnect = function(evt) {
+  port = evt.ports[0];
+
+  for (var i = 0; i < steps.length; ++i) {
+    performance.clearMarks();
+    performance.clearMeasures();
+    steps[i]();
+  }
+
+  port.postMessage({ type: "finish" });
+};
diff --git a/dom/performance/tests/test_performance_navigation_timing.html b/dom/performance/tests/test_performance_navigation_timing.html
new file mode 100644
index 0000000000..869d0dc106
--- /dev/null
+++ b/dom/performance/tests/test_performance_navigation_timing.html
@@ -0,0 +1,104 @@
+
+
+
+
+ Test for Bug 1462891
+
+
+
+
+ Mozilla Bug 1462891 - Navigation Timing API
+
+
+
+
+ + diff --git a/dom/performance/tests/test_performance_observer.html b/dom/performance/tests/test_performance_observer.html new file mode 100644 index 0000000000..86c780c56c --- /dev/null +++ b/dom/performance/tests/test_performance_observer.html @@ -0,0 +1,142 @@ + + + + + +Test for performance observer + + + + +
+ + + + diff --git a/dom/performance/tests/test_performance_observer.js b/dom/performance/tests/test_performance_observer.js new file mode 100644 index 0000000000..d342b6b34e --- /dev/null +++ b/dom/performance/tests/test_performance_observer.js @@ -0,0 +1,286 @@ +test(t => { + assert_throws( + { name: "TypeError" }, + function() { + new PerformanceObserver(); + }, + "PerformanceObserver constructor should throw TypeError if no argument is specified." + ); + + assert_throws( + { name: "TypeError" }, + function() { + new PerformanceObserver({}); + }, + "PerformanceObserver constructor should throw TypeError if the argument is not a function." + ); +}, "Test that PerformanceObserver constructor throws exception"); + +test(t => { + var observer = new PerformanceObserver(() => {}); + + assert_throws( + { name: "TypeError" }, + function() { + observer.observe(); + }, + "observe() should throw TypeError exception if no option specified." + ); + + assert_throws( + { name: "TypeError" }, + function() { + observer.observe({ unsupportedAttribute: "unsupported" }); + }, + "obsrve() should throw TypeError exception if the option has no 'entryTypes' attribute." + ); + + assert_equals( + undefined, + observer.observe({ entryTypes: [] }), + "observe() should silently ignore empty 'entryTypes' sequence." + ); + + assert_throws( + { name: "TypeError" }, + function() { + observer.observe({ entryTypes: null }); + }, + "obsrve() should throw TypeError exception if 'entryTypes' attribute is null." + ); + + assert_equals( + undefined, + observer.observe({ entryTypes: ["invalid"] }), + "observe() should silently ignore invalid 'entryTypes' values." + ); +}, "Test that PerformanceObserver.observe throws exception"); + +function promiseObserve(test, options) { + return new Promise(resolve => { + performance.clearMarks(); + performance.clearMeasures(); + + var observer = new PerformanceObserver(list => resolve(list)); + observer.observe(options); + test.add_cleanup(() => observer.disconnect()); + }); +} + +promise_test(t => { + var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] }); + + performance.mark("test-start"); + performance.mark("test-end"); + performance.measure("test-measure", "test-start", "test-end"); + + return promise.then(list => { + assert_equals( + list.getEntries().length, + 3, + "There should be three observed entries." + ); + + var markEntries = list.getEntries().filter(entry => { + return entry.entryType == "mark"; + }); + assert_array_equals( + markEntries, + performance.getEntriesByType("mark"), + "Observed 'mark' entries should equal to entries obtained by getEntriesByType." + ); + + var measureEntries = list.getEntries().filter(entry => { + return entry.entryType == "measure"; + }); + assert_array_equals( + measureEntries, + performance.getEntriesByType("measure"), + "Observed 'measure' entries should equal to entries obtained by getEntriesByType." 
+ ); + }); +}, "Test for user-timing with PerformanceObserver"); + +promise_test(t => { + var promise = new Promise((resolve, reject) => { + performance.clearMarks(); + performance.clearMeasures(); + + var observer = new PerformanceObserver(list => reject(list)); + observer.observe({ entryTypes: ["mark", "measure"] }); + observer.disconnect(); + t.step_timeout(resolve, 100); + }); + + performance.mark("test-start"); + performance.mark("test-end"); + performance.measure("test-measure", "test-start", "test-end"); + + return promise.then( + () => { + assert_equals(performance.getEntriesByType("mark").length, 2); + assert_equals(performance.getEntriesByType("measure").length, 1); + }, + list => { + assert_unreached("Observer callback should never be called."); + } + ); +}, "Nothing should be notified after disconnecting observer"); + +promise_test(t => { + var promise = promiseObserve(t, { entryTypes: ["mark"] }); + + performance.mark("test"); + + return promise.then(list => { + assert_array_equals( + list.getEntries({ entryType: "mark" }), + performance.getEntriesByType("mark"), + "getEntries with entryType filter should return correct results." + ); + + assert_array_equals( + list.getEntries({ name: "test" }), + performance.getEntriesByName("test"), + "getEntries with name filter should return correct results." + ); + + assert_array_equals( + list.getEntries({ name: "test", entryType: "mark" }), + performance.getEntriesByName("test"), + "getEntries with name and entryType filter should return correct results." + ); + + assert_array_equals( + list.getEntries({ name: "invalid" }), + [], + "getEntries with non-existent name filter should return an empty array." + ); + + assert_array_equals( + list.getEntries({ name: "test", entryType: "measure" }), + [], + "getEntries with name filter and non-existent entryType should return an empty array." + ); + + assert_array_equals( + list.getEntries({ name: "invalid", entryType: "mark" }), + [], + "getEntries with non-existent name and entryType filter should return an empty array." + ); + + assert_array_equals( + list.getEntries({ initiatorType: "xmlhttprequest" }), + [], + "getEntries with initiatorType filter should return an empty array." 
+ ); + }); +}, "Test for PerformanceObserverEntryList.getEntries"); + +promise_test(t => { + var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] }); + + performance.mark("test"); + performance.measure("test-measure", "test", "test"); + + return promise.then(list => { + assert_array_equals( + list.getEntriesByType("mark"), + performance.getEntriesByType("mark") + ); + assert_array_equals( + list.getEntriesByType("measure"), + performance.getEntriesByType("measure") + ); + }); +}, "Test for PerformanceObserverEntryList.getEntriesByType"); + +promise_test(t => { + var promise = promiseObserve(t, { entryTypes: ["mark", "measure"] }); + + performance.mark("test"); + performance.measure("test-measure", "test", "test"); + + return promise.then(list => { + assert_array_equals( + list.getEntriesByName("test"), + performance.getEntriesByName("test") + ); + assert_array_equals( + list.getEntriesByName("test-measure"), + performance.getEntriesByName("test-measure") + ); + }); +}, "Test for PerformanceObserverEntryList.getEntriesByName"); + +promise_test(t => { + var promise = new Promise(resolve => { + performance.clearMarks(); + performance.clearMeasures(); + + var observer = new PerformanceObserver(list => resolve(list)); + observer.observe({ entryTypes: ["mark", "measure"] }); + observer.observe({ entryTypes: ["mark", "measure"] }); + t.add_cleanup(() => observer.disconnect()); + }); + + performance.mark("test-start"); + performance.mark("test-end"); + performance.measure("test-measure", "test-start", "test-end"); + + return promise.then(list => { + assert_equals( + list.getEntries().length, + 3, + "Observed user timing entries should have only three entries." + ); + }); +}, "Test that invoking observe method twice affects nothing"); + +promise_test(t => { + var promise = new Promise(resolve => { + performance.clearMarks(); + performance.clearMeasures(); + + var observer = new PerformanceObserver(list => resolve(list)); + observer.observe({ entryTypes: ["mark", "measure"] }); + observer.observe({ entryTypes: ["mark"] }); + t.add_cleanup(() => observer.disconnect()); + }); + + performance.mark("test-start"); + performance.mark("test-end"); + performance.measure("test-measure", "test-start", "test-end"); + + return promise.then(list => { + assert_equals( + list.getEntries().length, + 2, + "Observed user timing entries should have only two entries." + ); + }); +}, "Test that observing filter is replaced by a new filter"); + +promise_test(t => { + var promise = new Promise(resolve => { + performance.clearMarks(); + performance.clearMeasures(); + + var observer = new PerformanceObserver(list => resolve(list)); + observer.observe({ entryTypes: ["mark"] }); + observer.observe({ entryTypes: ["measure"] }); + t.add_cleanup(() => observer.disconnect()); + }); + + performance.mark("test-start"); + performance.mark("test-end"); + performance.measure("test-measure", "test-start", "test-end"); + + return promise.then(list => { + assert_equals( + list.getEntries().length, + 1, + "Observed user timing entries should have only 1 entries." + ); + }); +}, "Test that observing filter is replaced by a new filter"); diff --git a/dom/performance/tests/test_performance_paint_observer.html b/dom/performance/tests/test_performance_paint_observer.html new file mode 100644 index 0000000000..2ded1db797 --- /dev/null +++ b/dom/performance/tests/test_performance_paint_observer.html @@ -0,0 +1,40 @@ + + + + + + Test for Bug 1518999 (Observer API) + + + + + + Mozilla + Bug 1518999 - Paint Timing API For Observers +

+ + + diff --git a/dom/performance/tests/test_performance_paint_observer_helper.html b/dom/performance/tests/test_performance_paint_observer_helper.html new file mode 100644 index 0000000000..ae27c9480d --- /dev/null +++ b/dom/performance/tests/test_performance_paint_observer_helper.html @@ -0,0 +1,35 @@ + + + + + + + diff --git a/dom/performance/tests/test_performance_paint_timing.html b/dom/performance/tests/test_performance_paint_timing.html new file mode 100644 index 0000000000..f8784ecf26 --- /dev/null +++ b/dom/performance/tests/test_performance_paint_timing.html @@ -0,0 +1,38 @@ + + + + + + Test for Bug 1518999 + + + + + + Mozilla + Bug 1518999 - Paint Timing API +

+ + + diff --git a/dom/performance/tests/test_performance_paint_timing_helper.html b/dom/performance/tests/test_performance_paint_timing_helper.html new file mode 100644 index 0000000000..c05b38cac0 --- /dev/null +++ b/dom/performance/tests/test_performance_paint_timing_helper.html @@ -0,0 +1,65 @@ + + + + + + Test for Bug 1518999 + + + +
+
+
+ +
+ + + diff --git a/dom/performance/tests/test_performance_server_timing.html b/dom/performance/tests/test_performance_server_timing.html new file mode 100644 index 0000000000..cba11a5fdd --- /dev/null +++ b/dom/performance/tests/test_performance_server_timing.html @@ -0,0 +1,58 @@ + + + + + +Test for PerformanceServerTiming + + + + +
+ + diff --git a/dom/performance/tests/test_performance_server_timing_plain_http.html b/dom/performance/tests/test_performance_server_timing_plain_http.html new file mode 100644 index 0000000000..7dcb8bd38d --- /dev/null +++ b/dom/performance/tests/test_performance_server_timing_plain_http.html @@ -0,0 +1,42 @@ + + + + + +Plain HTTP Test for PerformanceServerTiming + + + + +
+ + diff --git a/dom/performance/tests/test_performance_timing_json.html b/dom/performance/tests/test_performance_timing_json.html new file mode 100644 index 0000000000..97079c0d2f --- /dev/null +++ b/dom/performance/tests/test_performance_timing_json.html @@ -0,0 +1,32 @@ + + + + + + Test for Bug 1375829 + + + + + +Mozilla Bug 1375829 +

+
+
+
+ + diff --git a/dom/performance/tests/test_performance_user_timing.html b/dom/performance/tests/test_performance_user_timing.html new file mode 100644 index 0000000000..fa0aaceb4e --- /dev/null +++ b/dom/performance/tests/test_performance_user_timing.html @@ -0,0 +1,49 @@ + + + + + Test for Bug 782751 + + + + + + Mozilla Bug 782751 - User Timing API +
+
+
+
+
+ + diff --git a/dom/performance/tests/test_performance_user_timing.js b/dom/performance/tests/test_performance_user_timing.js new file mode 100644 index 0000000000..fe91945953 --- /dev/null +++ b/dom/performance/tests/test_performance_user_timing.js @@ -0,0 +1,318 @@ +var steps = [ + // Test single mark addition + function() { + ok(true, "Running mark addition test"); + performance.mark("test"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "Number of marks should be 1"); + var mark = marks[0]; + is(mark.name, "test", "mark name should be 'test'"); + is(mark.entryType, "mark", "mark type should be 'mark'"); + isnot(mark.startTime, 0, "mark start time should not be 0"); + is(mark.duration, 0, "mark duration should be 0"); + }, + // Test multiple mark addition + function() { + ok(true, "Running multiple mark with same name addition test"); + performance.mark("test"); + performance.mark("test"); + performance.mark("test"); + var marks_type = performance.getEntriesByType("mark"); + is(marks_type.length, 3, "Number of marks by type should be 3"); + var marks_name = performance.getEntriesByName("test"); + is(marks_name.length, 3, "Number of marks by name should be 3"); + var mark = marks_name[0]; + is(mark.name, "test", "mark name should be 'test'"); + is(mark.entryType, "mark", "mark type should be 'mark'"); + isnot(mark.startTime, 0, "mark start time should not be 0"); + is(mark.duration, 0, "mark duration should be 0"); + var times = []; + // This also tests the chronological ordering specified as + // required for getEntries in the performance timeline spec. + marks_name.forEach(function(s) { + times.forEach(function(time) { + ok( + s.startTime >= time.startTime, + "Times should be equal or increasing between similarly named marks: " + + s.startTime + + " >= " + + time.startTime + ); + }); + times.push(s); + }); + }, + // Test all marks removal + function() { + ok(true, "Running all mark removal test"); + performance.mark("test"); + performance.mark("test2"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 2, "number of marks before all removal"); + performance.clearMarks(); + marks = performance.getEntriesByType("mark"); + is(marks.length, 0, "number of marks after all removal"); + }, + // Test single mark removal + function() { + ok(true, "Running removal test (0 'test' marks with other marks)"); + performance.mark("test2"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "number of marks after all removal"); + }, + // Test single mark removal + function() { + ok(true, "Running removal test (0 'test' marks with no other marks)"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 0, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 0, "number of marks after all removal"); + }, + function() { + ok(true, "Running removal test (1 'test' mark with other marks)"); + performance.mark("test"); + performance.mark("test2"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 2, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "number of marks after all removal"); + }, + function() { + ok(true, "Running removal test (1 'test' mark with no other marks)"); + 
performance.mark("test"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 0, "number of marks after all removal"); + }, + function() { + ok(true, "Running removal test (2 'test' marks with other marks)"); + performance.mark("test"); + performance.mark("test"); + performance.mark("test2"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 3, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 1, "number of marks after all removal"); + }, + function() { + ok(true, "Running removal test (2 'test' marks with no other marks)"); + performance.mark("test"); + performance.mark("test"); + var marks = performance.getEntriesByType("mark"); + is(marks.length, 2, "number of marks before all removal"); + performance.clearMarks("test"); + marks = performance.getEntriesByType("mark"); + is(marks.length, 0, "number of marks after all removal"); + }, + // Test mark name being same as navigation timing parameter + function() { + ok(true, "Running mark name collision test"); + for (n in performance.timing) { + try { + if (n == "toJSON") { + ok(true, "Skipping toJSON entry in collision test"); + continue; + } + performance.mark(n); + ok( + false, + "Mark name collision test failed for name " + + n + + ", shouldn't make it here!" + ); + } catch (e) { + ok( + e instanceof DOMException, + "DOM exception thrown for mark named " + n + ); + is( + e.code, + e.SYNTAX_ERR, + "DOM exception for name collision is syntax error" + ); + } + } + }, + // Test measure + function() { + ok(true, "Running measure addition with no start/end time test"); + performance.measure("test"); + var measures = performance.getEntriesByType("measure"); + is(measures.length, 1, "number of measures should be 1"); + var measure = measures[0]; + is(measure.name, "test", "measure name should be 'test'"); + is(measure.entryType, "measure", "measure type should be 'measure'"); + is(measure.startTime, 0, "measure start time should be zero"); + ok(measure.duration >= 0, "measure duration should not be negative"); + }, + function() { + ok(true, "Running measure addition with only start time test"); + performance.mark("test1"); + performance.measure("test", "test1", undefined); + var measures = performance.getEntriesByName("test", "measure"); + var marks = performance.getEntriesByName("test1", "mark"); + var measure = measures[0]; + var mark = marks[0]; + is( + measure.startTime, + mark.startTime, + "measure start time should be equal to the mark startTime" + ); + ok(measure.duration >= 0, "measure duration should not be negative"); + }, + function() { + ok(true, "Running measure addition with only end time test"); + performance.mark("test1"); + performance.measure("test", undefined, "test1"); + var measures = performance.getEntriesByName("test", "measure"); + var marks = performance.getEntriesByName("test1", "mark"); + var measure = measures[0]; + var mark = marks[0]; + ok(measure.duration >= 0, "measure duration should not be negative"); + }, + // Test measure picking latest version of similarly named tags + function() { + ok(true, "Running multiple mark with same name addition test"); + performance.mark("test"); + performance.mark("test"); + performance.mark("test"); + performance.mark("test2"); + var marks_name = performance.getEntriesByName("test"); + is(marks_name.length, 3, 
"Number of marks by name should be 3"); + var marks_name2 = performance.getEntriesByName("test2"); + is(marks_name2.length, 1, "Number of marks by name should be 1"); + var test_mark = marks_name2[0]; + performance.measure("test", "test", "test2"); + var measures_type = performance.getEntriesByType("measure"); + var last_mark = marks_name[marks_name.length - 1]; + is(measures_type.length, 1, "Number of measures by type should be 1"); + var measure = measures_type[0]; + is( + measure.startTime, + last_mark.startTime, + "Measure start time should be the start time of the latest 'test' mark" + ); + // Tolerance testing to avoid oranges, since we're doing double math across two different languages. + ok( + measure.duration - (test_mark.startTime - last_mark.startTime) < 0.00001, + "Measure duration ( " + + measure.duration + + ") should be difference between two marks" + ); + }, + function() { + // We don't have navigationStart in workers. + if ("window" in self) { + ok(true, "Running measure addition with no start/end time test"); + performance.measure("test", "navigationStart"); + var measures = performance.getEntriesByType("measure"); + is(measures.length, 1, "number of measures should be 1"); + var measure = measures[0]; + is(measure.name, "test", "measure name should be 'test'"); + is(measure.entryType, "measure", "measure type should be 'measure'"); + is(measure.startTime, 0, "measure start time should be zero"); + ok(measure.duration >= 0, "measure duration should not be negative"); + } + }, + // Test all measure removal + function() { + ok(true, "Running all measure removal test"); + performance.measure("test"); + performance.measure("test2"); + var measures = performance.getEntriesByType("measure"); + is(measures.length, 2, "measure entries should be length 2"); + performance.clearMeasures(); + measures = performance.getEntriesByType("measure"); + is(measures.length, 0, "measure entries should be length 0"); + }, + // Test single measure removal + function() { + ok(true, "Running all measure removal test"); + performance.measure("test"); + performance.measure("test2"); + var measures = performance.getEntriesByType("measure"); + is(measures.length, 2, "measure entries should be length 2"); + performance.clearMeasures("test"); + measures = performance.getEntriesByType("measure"); + is(measures.length, 1, "measure entries should be length 1"); + }, + // Test measure with invalid start time mark name + function() { + ok(true, "Running measure invalid start test"); + try { + performance.measure("test", "notamark"); + ok(false, "invalid measure start time exception not thrown!"); + } catch (e) { + ok(e instanceof DOMException, "DOM exception thrown for invalid measure"); + is( + e.code, + e.SYNTAX_ERR, + "DOM exception for invalid time is syntax error" + ); + } + }, + // Test measure with invalid end time mark name + function() { + ok(true, "Running measure invalid end test"); + try { + performance.measure("test", undefined, "notamark"); + ok(false, "invalid measure end time exception not thrown!"); + } catch (e) { + ok(e instanceof DOMException, "DOM exception thrown for invalid measure"); + is( + e.code, + e.SYNTAX_ERR, + "DOM exception for invalid time is syntax error" + ); + } + }, + // Test measure name being same as navigation timing parameter + function() { + ok(true, "Running measure name collision test"); + for (n in performance.timing) { + if (n == "toJSON") { + ok(true, "Skipping toJSON entry in collision test"); + continue; + } + performance.measure(n); + ok(true, "Measure name 
supports name collisions: " + n); + } + }, + // Test measure mark being a reserved name + function() { + ok(true, "Create measures using all reserved names"); + for (n in performance.timing) { + try { + if (n == "toJSON") { + ok(true, "Skipping toJSON entry in collision test"); + continue; + } + performance.measure("test", n); + ok(true, "Measure created from reserved name as starting time: " + n); + } catch (e) { + ok( + [ + "redirectStart", + "redirectEnd", + "unloadEventStart", + "unloadEventEnd", + "loadEventEnd", + "secureConnectionStart", + ].includes(n), + "Measure created from reserved name as starting time: " + + n + + " and threw expected error" + ); + } + } + }, + // TODO: Test measure picking latest version of similarly named tags +]; diff --git a/dom/performance/tests/test_performance_user_timing_dying_global.html b/dom/performance/tests/test_performance_user_timing_dying_global.html new file mode 100644 index 0000000000..18e4a54684 --- /dev/null +++ b/dom/performance/tests/test_performance_user_timing_dying_global.html @@ -0,0 +1,61 @@ + + + + Test for User Timing APIs on dying globals + + + + + + + diff --git a/dom/performance/tests/test_sharedWorker_performance_user_timing.html b/dom/performance/tests/test_sharedWorker_performance_user_timing.html new file mode 100644 index 0000000000..d26594e292 --- /dev/null +++ b/dom/performance/tests/test_sharedWorker_performance_user_timing.html @@ -0,0 +1,30 @@ + + + + + Test for worker performance timing API + + + + + + + diff --git a/dom/performance/tests/test_timeOrigin.html b/dom/performance/tests/test_timeOrigin.html new file mode 100644 index 0000000000..69796a432d --- /dev/null +++ b/dom/performance/tests/test_timeOrigin.html @@ -0,0 +1,76 @@ + + + + Test for performance.timeOrigin + + + + + + + + + + + + diff --git a/dom/performance/tests/test_worker_observer.html b/dom/performance/tests/test_worker_observer.html new file mode 100644 index 0000000000..7f4df855c9 --- /dev/null +++ b/dom/performance/tests/test_worker_observer.html @@ -0,0 +1,41 @@ + + + + + +Test for performance observer in worker + + + + +
+ + diff --git a/dom/performance/tests/test_worker_performance_entries.html b/dom/performance/tests/test_worker_performance_entries.html new file mode 100644 index 0000000000..d3f124fdb3 --- /dev/null +++ b/dom/performance/tests/test_worker_performance_entries.html @@ -0,0 +1,39 @@ + + + + + PerformanceResouceTiming in workers + + + + + + + diff --git a/dom/performance/tests/test_worker_performance_entries.js b/dom/performance/tests/test_worker_performance_entries.js new file mode 100644 index 0000000000..1eea24efd9 --- /dev/null +++ b/dom/performance/tests/test_worker_performance_entries.js @@ -0,0 +1,107 @@ +function ok(a, msg) { + postMessage({ type: "check", status: !!a, msg }); +} + +function is(a, b, msg) { + ok(a === b, msg); +} + +function finish(a, msg) { + postMessage({ type: "finish" }); +} + +function check(resource, initiatorType, protocol) { + let entries = performance.getEntries(); + ok(entries.length == 1, "We have an entry"); + + ok(entries[0] instanceof PerformanceEntry, "The entry is a PerformanceEntry"); + ok(entries[0].name.endsWith(resource), "The entry has been found!"); + + is(entries[0].entryType, "resource", "Correct EntryType"); + ok(entries[0].startTime > 0, "We have a startTime"); + ok(entries[0].duration > 0, "We have a duration"); + + ok( + entries[0] instanceof PerformanceResourceTiming, + "The entry is a PerformanceResourceTiming" + ); + + is(entries[0].initiatorType, initiatorType, "Correct initiatorType"); + is(entries[0].nextHopProtocol, protocol, "Correct protocol"); + + performance.clearResourceTimings(); +} + +function simple_checks() { + ok("performance" in self, "We have self.performance"); + performance.clearResourceTimings(); + next(); +} + +function fetch_request() { + fetch("test_worker_performance_entries.sjs") + .then(r => r.blob()) + .then(blob => { + check("test_worker_performance_entries.sjs", "fetch", "http/1.1"); + }) + .then(next); +} + +function xhr_request() { + let xhr = new XMLHttpRequest(); + xhr.open("GET", "test_worker_performance_entries.sjs"); + xhr.send(); + xhr.onload = () => { + check("test_worker_performance_entries.sjs", "xmlhttprequest", "http/1.1"); + next(); + }; +} + +function sync_xhr_request() { + let xhr = new XMLHttpRequest(); + xhr.open("GET", "test_worker_performance_entries.sjs", false); + xhr.send(); + check("test_worker_performance_entries.sjs", "xmlhttprequest", "http/1.1"); + next(); +} + +function import_script() { + importScripts(["empty.js"]); + check("empty.js", "other", "http/1.1"); + next(); +} + +function redirect() { + fetch("test_worker_performance_entries.sjs?redirect") + .then(r => r.text()) + .then(text => { + is(text, "Hello world \\o/", "The redirect worked correctly"); + check( + "test_worker_performance_entries.sjs?redirect", + "fetch", + "http/1.1" + ); + }) + .then(next); +} + +let tests = [ + simple_checks, + fetch_request, + xhr_request, + sync_xhr_request, + import_script, + redirect, +]; + +function next() { + if (!tests.length) { + finish(); + return; + } + + let test = tests.shift(); + test(); +} + +next(); diff --git a/dom/performance/tests/test_worker_performance_entries.sjs b/dom/performance/tests/test_worker_performance_entries.sjs new file mode 100644 index 0000000000..62f00c22bc --- /dev/null +++ b/dom/performance/tests/test_worker_performance_entries.sjs @@ -0,0 +1,11 @@ +function handleRequest(request, response) { + response.setHeader("Content-Type", "text/html"); + + if (request.queryString == "redirect") { + response.setStatusLine(request.httpVersion, 302, "See Other"); + 
response.setHeader("Location", "test_worker_performance_entries.sjs?ok"); + } else { + response.setStatusLine(request.httpVersion, 200, "OK"); + response.write("Hello world \\o/"); + } +} diff --git a/dom/performance/tests/test_worker_performance_now.html b/dom/performance/tests/test_worker_performance_now.html new file mode 100644 index 0000000000..be4f8f56ea --- /dev/null +++ b/dom/performance/tests/test_worker_performance_now.html @@ -0,0 +1,31 @@ + + + + + Validate Interfaces Exposed to Workers + + + + + + + diff --git a/dom/performance/tests/test_worker_performance_now.js b/dom/performance/tests/test_worker_performance_now.js new file mode 100644 index 0000000000..a22f66256e --- /dev/null +++ b/dom/performance/tests/test_worker_performance_now.js @@ -0,0 +1,68 @@ +function ok(a, msg) { + dump("OK: " + !!a + " => " + a + ": " + msg + "\n"); + postMessage({ type: "status", status: !!a, msg: a + ": " + msg }); +} + +function workerTestDone() { + postMessage({ type: "finish" }); +} + +ok(self.performance, "Performance object should exist."); +ok( + typeof self.performance.now == "function", + "Performance object should have a 'now' method." +); +var n = self.performance.now(), + d = Date.now(); +ok(n >= 0, "The value of now() should be equal to or greater than 0."); +ok( + self.performance.now() >= n, + "The value of now() should monotonically increase." +); + +// Spin on setTimeout() until performance.now() increases. Due to recent +// security developments, the hr-time working group has not yet reached +// consensus on what the recommend minimum clock resolution should be: +// https://w3c.github.io/hr-time/#clock-resolution +// Since setTimeout might return too early/late, our goal is for +// performance.now() to increase before a 2 ms deadline rather than specific +// number of setTimeout(N) invocations. +// See bug 749894 (intermittent failures of this test) +setTimeout(checkAfterTimeout, 1); + +var checks = 0; + +function checkAfterTimeout() { + checks++; + var d2 = Date.now(); + var n2 = self.performance.now(); + + // Spin on setTimeout() until performance.now() increases. Abort the test + // if it runs for more than 2 ms or 50 timeouts. + let elapsedTime = d2 - d; + let elapsedPerf = n2 - n; + if (elapsedPerf == 0 && elapsedTime < 2 && checks < 50) { + setTimeout(checkAfterTimeout, 1); + return; + } + + // Our implementation provides 1 ms resolution (bug 1451790), but we + // can't assert that elapsedPerf >= 1 ms because this worker test runs with + // "privacy.reduceTimerPrecision" == false so performance.now() is not + // limited to 1 ms resolution. + ok( + elapsedPerf > 0, + `Loose - the value of now() should increase after 2ms. ` + + `delta now(): ${elapsedPerf} ms` + ); + + // If we need more than 1 iteration, then either performance.now() resolution + // is shorter than 1 ms or setTimeout() is returning too early. + ok( + checks == 1, + `Strict - the value of now() should increase after one setTimeout. 
` + + `iters: ${checks}, dt: ${elapsedTime}, now(): ${n2}` + ); + + workerTestDone(); +} diff --git a/dom/performance/tests/test_worker_user_timing.html b/dom/performance/tests/test_worker_user_timing.html new file mode 100644 index 0000000000..ebeac24e4f --- /dev/null +++ b/dom/performance/tests/test_worker_user_timing.html @@ -0,0 +1,30 @@ + + + + + Test for worker performance timing API + + + + + + + diff --git a/dom/performance/tests/worker_performance_observer.js b/dom/performance/tests/worker_performance_observer.js new file mode 100644 index 0000000000..3282c9d157 --- /dev/null +++ b/dom/performance/tests/worker_performance_observer.js @@ -0,0 +1,4 @@ +importScripts(["/resources/testharness.js"]); +importScripts(["test_performance_observer.js"]); + +done(); diff --git a/dom/performance/tests/worker_performance_user_timing.js b/dom/performance/tests/worker_performance_user_timing.js new file mode 100644 index 0000000000..257040f09f --- /dev/null +++ b/dom/performance/tests/worker_performance_user_timing.js @@ -0,0 +1,32 @@ +function ok(a, msg) { + dump("OK: " + !!a + " => " + a + " " + msg + "\n"); + postMessage({ type: "status", status: !!a, msg: a + ": " + msg }); +} + +function is(a, b, msg) { + dump("IS: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n"); + postMessage({ + type: "status", + status: a === b, + msg: a + " === " + b + ": " + msg, + }); +} + +function isnot(a, b, msg) { + dump("ISNOT: " + (a === b) + " => " + a + " | " + b + " " + msg + "\n"); + postMessage({ + type: "status", + status: a != b, + msg: a + " != " + b + ": " + msg, + }); +} + +importScripts(["test_performance_user_timing.js"]); + +for (var i = 0; i < steps.length; ++i) { + performance.clearMarks(); + performance.clearMeasures(); + steps[i](); +} + +postMessage({ type: "finish" }); -- cgit v1.2.3