Diffstat (limited to 'devtools/client/webconsole/test/chrome/test_render_perf.html')
-rw-r--r-- devtools/client/webconsole/test/chrome/test_render_perf.html | 239
1 file changed, 239 insertions(+), 0 deletions(-)
diff --git a/devtools/client/webconsole/test/chrome/test_render_perf.html b/devtools/client/webconsole/test/chrome/test_render_perf.html
new file mode 100644
index 0000000000..083f1bffb9
--- /dev/null
+++ b/devtools/client/webconsole/test/chrome/test_render_perf.html
@@ -0,0 +1,239 @@
+<!DOCTYPE HTML>
+<html lang="en">
+<head>
+ <meta charset="utf8">
+  <title>Test for render perf</title>
+ <script src="chrome://mochikit/content/tests/SimpleTest/SimpleTest.js"></script>
+ <script type="application/javascript" src="head.js"></script>
+ <!-- Any copyright is dedicated to the Public Domain.
+ - http://creativecommons.org/publicdomain/zero/1.0/ -->
+</head>
+<body>
+<p>Test for render perf</p>
+<div id="output"></div>
+
+<script type="text/javascript">
+"use strict";
+
+// To analyze the profile results:
+// > ./mach mochitest test_render_perf.html
+// Then open https://profiler.firefox.com and drag in the JSON file whose path
+// is printed at the end of this test.
+
+const NUM_MESSAGES = 4000;
+const NUM_STREAMING = 100;
+const {FileUtils} = ChromeUtils.import("resource://gre/modules/FileUtils.jsm");
+const Services = browserRequire("Services");
+Services.prefs.setIntPref("devtools.hud.loglimit", NUM_MESSAGES);
+const WebConsoleWrapper = browserRequire("devtools/client/webconsole/webconsole-wrapper");
+const actions =
+ browserRequire("devtools/client/webconsole/actions/index");
+const EventEmitter = browserRequire("devtools/shared/event-emitter");
+const testPackets = Array.from({length: NUM_MESSAGES}).map((el, id) => ({
+ "from": "server1.conn4.child1/consoleActor2",
+ "type": "consoleAPICall",
+ "message": {
+ "arguments": [
+ "foobar",
+ `${id % 2 === 0 ? "Even" : "Odd"} text`,
+ id,
+ ],
+ "columnNumber": 1,
+ "counter": null,
+ "filename": "file:///test.html",
+ "functionName": "",
+ "groupName": "",
+ "level": "log",
+ "lineNumber": 1,
+ "private": false,
+ "styles": [],
+ "timeStamp": 1455064271115 + id,
+ "timer": null,
+ "workerType": "none",
+ "category": "webdev",
+ },
+}));
+const lastPacket = testPackets.pop();
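+// Keep the final packet separate so addAllMessages() below can dispatch the
+// bulk synchronously and await only this last one.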
+
+function startMeasure(label) {
+ const startLabel = label + "start";
+ performance.mark(startLabel);
+ return {
+ stop: (clear = true) => {
+ performance.measure(label, startLabel);
+ const entries = performance.getEntriesByName(label);
+ if (clear) {
+ performance.clearMeasures(label);
+ }
+ return entries[entries.length - 1];
+ },
+ };
+}
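+// Example usage of the helper above (illustrative only; the label and the
+// doSomeWork() call are made up):
+//   const measure = startMeasure("some work");
+//   doSomeWork();
+//   const {duration} = measure.stop();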
+
+function wait(time) {
+ return new Promise(resolve => setTimeout(resolve, time));
+}
+
+async function addAllMessages(wrapper) {
+ testPackets.forEach((packet) => wrapper.dispatchMessageAdd(packet));
+ // Only wait for the last packet to minimize work.
+ await wrapper.dispatchMessageAdd(lastPacket, true);
+ await new Promise(resolve => requestAnimationFrame(resolve));
+}
+
+async function addMessage(wrapper, message) {
+ await wrapper.dispatchMessageAdd(message, true);
+}
+
+function getTimes(times) {
+  // Sort numerically; Array.prototype.sort is lexicographic by default.
+  times = times.sort((a, b) => a - b);
+ const totalTime = times.reduce((sum, t) => sum + t);
+ const avg = totalTime / times.length;
+ const median = times.length % 2 !== 0
+ ? times[Math.floor(times.length / 2)]
+ : (times[(times.length / 2) - 1] + times[times.length / 2]) / 2;
+ return {avg, median};
+}
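+// Worked example: getTimes([30, 10, 40, 20]) sorts to [10, 20, 30, 40] and
+// returns {avg: 25, median: (20 + 30) / 2 = 25}.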
+
+async function clearMessages(wrapper) {
+ wrapper.dispatchMessagesClear();
+ await new Promise(resolve => requestAnimationFrame(resolve));
+}
+
+async function testBulkLogging(wrapper) {
+ await clearMessages(wrapper);
+ const bulkTimes = [];
+ const iterations = 5;
+ for (let i = 0; i < iterations; i++) {
+ const measure = startMeasure("bulk log");
+ await addAllMessages(wrapper);
+ const {duration} = measure.stop();
+
+ info(`took ${duration} ms to render bulk messages (iteration ${i})`);
+ bulkTimes.push(duration);
+
+ // Do not clear the store on last iteration so the next test can use the messages.
+ if (i !== iterations - 1) {
+      // Wait before clearing messages so those events are easier to spot in the profile.
+ await wait(500);
+ await clearMessages(wrapper);
+ await wait(500);
+ }
+ }
+ const {avg, median} = getTimes(bulkTimes);
+
+ info(`BULK: On average, it took ${avg} ms (median ${median} ms) ` +
+ `to render ${NUM_MESSAGES} messages`);
+}
+
+async function testFiltering(wrapper) {
+ const measureFilter = startMeasure("filtering");
+
+ const measureFilterOff = startMeasure("filtering off");
+ await wrapper.getStore().dispatch(actions.filterToggle("log"));
+ const measureFilterOffEntry = measureFilterOff.stop();
+ info(`Filter toggle time (off): ${measureFilterOffEntry.duration}`);
+
+ const measureFilterOn = startMeasure("filtering on");
+ await wrapper.getStore().dispatch(actions.filterToggle("log"));
+ const measureFilterOnEntry = measureFilterOn.stop();
+ info(`Filter toggle time (on): ${measureFilterOnEntry.duration}`);
+ measureFilter.stop();
+}
+
+async function testStreamLogging(wrapper, clear = true) {
+ const streamMeasureLabel = "stream" + (clear === false ? " and prune" : "");
+ const streamMeasure = startMeasure(streamMeasureLabel);
+ if (clear === true) {
+ await clearMessages(wrapper);
+ }
+
+ const measureLabel = "stream - add single message";
+ for (let i = 0; i < NUM_STREAMING; i++) {
+ const measure = startMeasure(measureLabel);
+ await addMessage(wrapper, testPackets[i]);
+ measure.stop(false);
+    await wait(100);
+ }
+
+ const streamTimes = performance.getEntriesByName(measureLabel)
+ .map(entry => entry.duration);
+ performance.clearMeasures(measureLabel);
+ const { avg, median } = getTimes(streamTimes);
+ info(`STREAMING${clear === false ? " AND PRUNING" : ""}: On average, ` +
+ `it took ${avg} ms (median ${median} ms) for each message`);
+ streamMeasure.stop();
+}
+
+window.onload = async function() {
+  // This test does costly work multiple times to gather better performance
+  // data. It doesn't run in automation.
+ SimpleTest.requestLongerTimeout(3);
+
+ const wrapper = new WebConsoleWrapper(
+ document.getElementById("output"),
+ {hud: EventEmitter.decorate({proxy: {}}), focus: () => {}},
+ {},
+ null,
+ document,
+ );
+ wrapper.init();
+
+ // From https://github.com/firefox-devtools/profiler/blob/b73eb73df04c7df51464fa50eeadef3dc7f5d4e2/docs/gecko-profile-format.md#L21
+ const settings = {
+ entries: 100000000,
+ interval: 1,
+ features: ["js"],
+ threads: ["GeckoMain"],
+ };
+ Services.profiler.StartProfiler(
+ settings.entries,
+ settings.interval,
+ settings.features,
+ settings.threads,
+ );
+ info("Profiler has started");
+
+ await wait(500);
+
+ await testBulkLogging(wrapper);
+
+ await wait(500);
+
+ await testFiltering(wrapper);
+
+ await wait(500);
+
+  // First pass, without clearing the store.
+ await testStreamLogging(wrapper, false);
+
+ await wait(500);
+  // Second pass, with an empty store.
+ await testStreamLogging(wrapper, true);
+
+ ok(true, "Tests finished");
+
+ const file = FileUtils.getFile("TmpD", [`test_render_perf_${Date.now()}.json`]);
+ Services.profiler.dumpProfileToFile(file.path);
+ Services.profiler.StopProfiler();
+
+ info(`
+
+SAVING PROFILE: ${file.path}
+
+To upload the profile, run the following commands:
+
+ gzip ${file.path}
+ curl -O https://raw.githubusercontent.com/firefox-devtools/profiler-server/master/tools/decode_jwt_payload.py
+ curl 'https://api.profiler.firefox.com/compressed-store' --compressed --data-binary @${file.path}.gz --header 'Accept: application/vnd.firefox-profiler+json;version=1' \\
+ | python3 decode_jwt_payload.py \\
+ | awk '{print "Hosted at: https://profiler.firefox.com/public/"$1}'
+
+
+`);
+
+ SimpleTest.finish();
+};
+</script>
+</body>
+</html>