author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 14:29:10 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-28 14:29:10 +0000
commit     2aa4a82499d4becd2284cdb482213d541b8804dd (patch)
tree       b80bf8bf13c3766139fbacc530efd0dd9d54394c /testing/awsy
parent     Initial commit. (diff)
download   firefox-2aa4a82499d4becd2284cdb482213d541b8804dd.tar.xz
           firefox-2aa4a82499d4becd2284cdb482213d541b8804dd.zip
Adding upstream version 86.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'testing/awsy')
-rw-r--r-- | testing/awsy/README.md | 2
-rw-r--r-- | testing/awsy/awsy/__init__.py | 24
-rw-r--r-- | testing/awsy/awsy/awsy_test_case.py | 396
-rw-r--r-- | testing/awsy/awsy/parse_about_memory.py | 173
-rw-r--r-- | testing/awsy/awsy/process_perf_data.py | 225
-rw-r--r-- | testing/awsy/awsy/test_base_memory_usage.py | 129
-rw-r--r-- | testing/awsy/awsy/test_memory_usage.py | 243
-rw-r--r-- | testing/awsy/awsy/webservers.py | 99
-rw-r--r-- | testing/awsy/conf/base-prefs.json | 14
-rw-r--r-- | testing/awsy/conf/base-testvars.json | 5
-rw-r--r-- | testing/awsy/conf/prefs.json | 13
-rw-r--r-- | testing/awsy/conf/testvars.json | 6
-rw-r--r-- | testing/awsy/conf/tp6-pages.yml | 50
-rw-r--r-- | testing/awsy/conf/tp6-prefs.json | 15
-rw-r--r-- | testing/awsy/conf/tp6-testvars.json | 5
-rw-r--r-- | testing/awsy/mach_commands.py | 354
-rw-r--r-- | testing/awsy/moz.build | 9
-rw-r--r-- | testing/awsy/requirements.txt | 2
-rw-r--r-- | testing/awsy/setup.py | 31
-rw-r--r-- | testing/awsy/tp5n-pageset.manifest | 10
-rw-r--r-- | testing/awsy/tp6-pageset.manifest | 212
21 files changed, 2017 insertions, 0 deletions
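
For orientation before the full patch: the new mach_commands.py below registers an `awsy-test` mach command, so once this lands the suite can be driven from a local build roughly as follows (a minimal sketch based only on the arguments defined in this patch, not an exhaustive reference):

    ./mach awsy-test --quick    # 3 pages, 1 iteration, 1s pauses, for a quick smoke run
    ./mach awsy-test --tp6      # use the tp6 pageset instead of the default tp5n set
    ./mach awsy-test --dmd      # enable DMD reporting (requires a DMD-enabled build)
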
diff --git a/testing/awsy/README.md b/testing/awsy/README.md new file mode 100644 index 0000000000..ca72bd9ca0 --- /dev/null +++ b/testing/awsy/README.md @@ -0,0 +1,2 @@ +# awsy-lite +Barebones are we slim yet test. diff --git a/testing/awsy/awsy/__init__.py b/testing/awsy/awsy/__init__.py new file mode 100644 index 0000000000..ff457dd164 --- /dev/null +++ b/testing/awsy/awsy/__init__.py @@ -0,0 +1,24 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +# Maximum number of tabs to open +MAX_TABS = 30 + +# Default amount of seconds to wait in between opening tabs +PER_TAB_PAUSE = 10 + +# Default amount of seconds to wait for things to be settled down +SETTLE_WAIT_TIME = 30 + +# Amount of times to run through the test suite +ITERATIONS = 5 + +__all__ = [ + "MAX_TABS", + "PER_TAB_PAUSE", + "SETTLE_WAIT_TIME", + "ITERATIONS", + "webservers", + "process_perf_data", +] diff --git a/testing/awsy/awsy/awsy_test_case.py b/testing/awsy/awsy/awsy_test_case.py new file mode 100644 index 0000000000..077c2dff9a --- /dev/null +++ b/testing/awsy/awsy/awsy_test_case.py @@ -0,0 +1,396 @@ +# -*- Mode: python; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import fnmatch +import glob +import gzip +import json +import os +import shutil +import sys +import tempfile +import time + +import mozlog.structured + +from marionette_driver import Wait +from marionette_driver.errors import JavascriptException, ScriptTimeoutException +from marionette_driver.keys import Keys +from marionette_harness import MarionetteTestCase + +AWSY_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +if AWSY_PATH not in sys.path: + sys.path.append(AWSY_PATH) + +from awsy import ITERATIONS, PER_TAB_PAUSE, SETTLE_WAIT_TIME, MAX_TABS +from awsy import process_perf_data + + +class AwsyTestCase(MarionetteTestCase): + """ + Base test case for AWSY tests. + """ + + def urls(self): + raise NotImplementedError() + + def perf_suites(self): + raise NotImplementedError() + + def perf_checkpoints(self): + raise NotImplementedError() + + def perf_extra_opts(self): + return None + + def iterations(self): + return self._iterations + + def pages_to_load(self): + return self._pages_to_load if self._pages_to_load else len(self.urls()) + + def settle(self): + """ + Pauses for the settle time. + """ + time.sleep(self._settleWaitTime) + + def setUp(self): + MarionetteTestCase.setUp(self) + + self.logger = mozlog.structured.structuredlog.get_default_logger() + self.marionette.set_context("chrome") + self._resultsDir = self.testvars["resultsDir"] + + self._binary = self.testvars["bin"] + self._run_local = self.testvars.get("run_local", False) + + # Cleanup our files from previous runs. + for patt in ( + "memory-report-*.json.gz", + "perfherder_data.json", + "dmd-*.json.gz", + ): + for f in glob.glob(os.path.join(self._resultsDir, patt)): + os.unlink(f) + + # Optional testvars. 
+ self._pages_to_load = self.testvars.get("entities", 0) + self._iterations = self.testvars.get("iterations", ITERATIONS) + self._perTabPause = self.testvars.get("perTabPause", PER_TAB_PAUSE) + self._settleWaitTime = self.testvars.get("settleWaitTime", SETTLE_WAIT_TIME) + self._maxTabs = self.testvars.get("maxTabs", MAX_TABS) + self._dmd = self.testvars.get("dmd", False) + + self.logger.info( + "areweslimyet run by %d pages, %d iterations," + " %d perTabPause, %d settleWaitTime" + % ( + self._pages_to_load, + self._iterations, + self._perTabPause, + self._settleWaitTime, + ) + ) + self.reset_state() + + def tearDown(self): + MarionetteTestCase.tearDown(self) + + try: + self.logger.info("processing data in %s!" % self._resultsDir) + perf_blob = process_perf_data.create_perf_data( + self._resultsDir, + self.perf_suites(), + self.perf_checkpoints(), + self.perf_extra_opts(), + ) + self.logger.info("PERFHERDER_DATA: %s" % json.dumps(perf_blob)) + + perf_file = os.path.join(self._resultsDir, "perfherder_data.json") + with open(perf_file, "w") as fp: + json.dump(perf_blob, fp, indent=2) + self.logger.info("Perfherder data written to %s" % perf_file) + except Exception: + raise + finally: + # Make sure we cleanup and upload any existing files even if there + # were errors processing the perf data. + if self._dmd: + self.cleanup_dmd() + + # copy it to moz upload dir if set + if "MOZ_UPLOAD_DIR" in os.environ: + for file in os.listdir(self._resultsDir): + file = os.path.join(self._resultsDir, file) + if os.path.isfile(file): + shutil.copy2(file, os.environ["MOZ_UPLOAD_DIR"]) + + def cleanup_dmd(self): + """ + Handles moving DMD reports from the temp dir to our resultsDir. + """ + from dmd import fixStackTraces + + # Move DMD files from temp dir to resultsDir. + tmpdir = tempfile.gettempdir() + tmp_files = os.listdir(tmpdir) + for f in fnmatch.filter(tmp_files, "dmd-*.json.gz"): + f = os.path.join(tmpdir, f) + # We don't fix stacks on Windows, even though we could, due to the + # tale of woe in bug 1626272. + if not sys.platform.startswith("win"): + self.logger.info("Fixing stacks for %s, this may take a while" % f) + isZipped = True + fixStackTraces(f, isZipped, gzip.open) + shutil.move(f, self._resultsDir) + + # Also attempt to cleanup the unified memory reports. + for f in fnmatch.filter(tmp_files, "unified-memory-report-*.json.gz"): + try: + os.remove(f) + except OSError: + self.logger.info("Unable to remove %s" % f) + + def reset_state(self): + self._pages_loaded = 0 + + # Close all tabs except one + for x in self.marionette.window_handles[1:]: + self.logger.info("closing window: %s" % x) + self.marionette.switch_to_window(x) + self.marionette.close() + + self._tabs = self.marionette.window_handles + self.marionette.switch_to_window(self._tabs[0]) + + def do_memory_report(self, checkpointName, iteration, minimize=False): + """Creates a memory report for all processes and and returns the + checkpoint. + + This will block until all reports are retrieved or a timeout occurs. + Returns the checkpoint or None on error. + + :param checkpointName: The name of the checkpoint. + + :param minimize: If true, minimize memory before getting the report. + """ + self.logger.info("starting checkpoint %s..." 
% checkpointName) + + checkpoint_file = "memory-report-%s-%d.json.gz" % (checkpointName, iteration) + checkpoint_path = os.path.join(self._resultsDir, checkpoint_file) + # On Windows, replace / with the Windows directory + # separator \ and escape it to prevent it from being + # interpreted as an escape character. + if sys.platform.startswith("win"): + checkpoint_path = checkpoint_path.replace("\\", "\\\\").replace("/", "\\\\") + + checkpoint_script = r""" + let [resolve] = arguments; + let dumper = + Cc["@mozilla.org/memory-info-dumper;1"].getService( + Ci.nsIMemoryInfoDumper); + dumper.dumpMemoryReportsToNamedFile( + "%s", + () => resolve("memory report done!"), + null, + /* anonymize */ false, + /* minimize memory usage */ %s); + """ % ( + checkpoint_path, + "true" if minimize else "false", + ) + + checkpoint = None + try: + finished = self.marionette.execute_async_script( + checkpoint_script, script_timeout=60000 + ) + if finished: + checkpoint = checkpoint_path + except JavascriptException as e: + self.logger.error("Checkpoint JavaScript error: %s" % e) + except ScriptTimeoutException: + self.logger.error("Memory report timed out") + except Exception: + self.logger.error("Unexpected error: %s" % sys.exc_info()[0]) + else: + self.logger.info("checkpoint created, stored in %s" % checkpoint_path) + + # Now trigger a DMD report if requested. + if self._dmd: + self.do_dmd(checkpointName, iteration) + + return checkpoint + + def do_dmd(self, checkpointName, iteration): + """ + Triggers DMD reports that are used to help identify sources of + 'heap-unclassified'. + + NB: This will dump DMD reports to the temp dir. Unfortunately it also + dumps memory reports, but that's all we have to work with right now. + """ + self.logger.info("Starting %s DMD reports..." % checkpointName) + + ident = "%s-%d" % (checkpointName, iteration) + + # TODO(ER): This actually takes a minimize argument. We could use that + # rather than have a separate `do_gc` function. Also it generates a + # memory report so we could combine this with `do_checkpoint`. The main + # issue would be moving everything out of the temp dir. + # + # Generated files have the form: + # dmd-<checkpoint>-<iteration>-pid.json.gz, ie: + # dmd-TabsOpenForceGC-0-10885.json.gz + # + # and for the memory report: + # unified-memory-report-<checkpoint>-<iteration>.json.gz + dmd_script = ( + r""" + let dumper = + Cc["@mozilla.org/memory-info-dumper;1"].getService( + Ci.nsIMemoryInfoDumper); + dumper.dumpMemoryInfoToTempDir( + "%s", + /* anonymize = */ false, + /* minimize = */ false); + """ + % ident + ) + + try: + # This is async and there's no callback so we use the existence + # of an incomplete memory report to check if it hasn't finished yet. + self.marionette.execute_script(dmd_script, script_timeout=60000) + tmpdir = tempfile.gettempdir() + prefix = "incomplete-unified-memory-report-%s-%d-*" % ( + checkpointName, + iteration, + ) + max_wait = 240 + elapsed = 0 + while fnmatch.filter(os.listdir(tmpdir), prefix) and elapsed < max_wait: + self.logger.info("Waiting for memory report to finish") + time.sleep(1) + elapsed += 1 + + incomplete = fnmatch.filter(os.listdir(tmpdir), prefix) + if incomplete: + # The memory reports never finished. 
+ self.logger.error("Incomplete memory reports leftover.") + for f in incomplete: + os.remove(os.path.join(tmpdir, f)) + + except JavascriptException as e: + self.logger.error("DMD JavaScript error: %s" % e) + except ScriptTimeoutException: + self.logger.error("DMD timed out") + except Exception: + self.logger.error("Unexpected error: %s" % sys.exc_info()[0]) + else: + self.logger.info("DMD started, prefixed with %s" % ident) + + def open_and_focus(self): + """Opens the next URL in the list and focuses on the tab it is opened in. + + A new tab will be opened if |_maxTabs| has not been exceeded, otherwise + the URL will be loaded in the next tab. + """ + page_to_load = self.urls()[self._pages_loaded % len(self.urls())] + tabs_loaded = len(self._tabs) + open_tab_script = r""" + gBrowser.loadOneTab("about:blank", { + inBackground: false, + triggeringPrincipal: Services.scriptSecurityManager.getSystemPrincipal(), + }); + """ + + if tabs_loaded < self._maxTabs and tabs_loaded <= self._pages_loaded: + full_tab_list = self.marionette.window_handles + + self.marionette.execute_script(open_tab_script, script_timeout=60000) + + Wait(self.marionette).until( + lambda mn: len(mn.window_handles) == tabs_loaded + 1, + message="No new tab has been opened", + ) + + # NB: The tab list isn't sorted, so we do a set diff to determine + # which is the new tab + new_tab_list = self.marionette.window_handles + new_tabs = list(set(new_tab_list) - set(full_tab_list)) + + self._tabs.append(new_tabs[0]) + tabs_loaded += 1 + + tab_idx = self._pages_loaded % self._maxTabs + + tab = self._tabs[tab_idx] + + # Tell marionette which tab we're on + # NB: As a work-around for an e10s marionette bug, only select the tab + # if we're really switching tabs. + if tabs_loaded > 1: + self.logger.info("switching to tab") + self.marionette.switch_to_window(tab) + self.logger.info("switched to tab") + + with self.marionette.using_context("content"): + self.logger.info("loading %s" % page_to_load) + self.marionette.navigate(page_to_load) + self.logger.info("loaded!") + + # The tab handle can change after actually loading content + # First build a set up w/o the current tab + old_tabs = set(self._tabs) + old_tabs.remove(tab) + # Perform a set diff to get the (possibly) new handle + new_tabs = set(self.marionette.window_handles) - old_tabs + # Update the tab list at the current index to preserve the tab + # ordering + if new_tabs: + self._tabs[tab_idx] = list(new_tabs)[0] + + # give the page time to settle + time.sleep(self._perTabPause) + + self._pages_loaded += 1 + + def signal_user_active(self): + """Signal to the browser that the user is active. + + Normally when being driven by marionette the browser thinks the + user is inactive the whole time because user activity is + detected by looking at key and mouse events. + + This would be a problem for this test because user inactivity is + used to schedule some GCs (in particular shrinking GCs), so it + would make this unrepresentative of real use. + + Instead we manually cause some inconsequential activity (a press + and release of the shift key) to make the browser think the user + is active. Then when we sleep to allow things to settle the + browser will see the user as becoming inactive and trigger + appropriate GCs, as would have happened in real use. 
+ """ + try: + action = self.marionette.actions.sequence("key", "keyboard_id") + action.key_down(Keys.SHIFT) + action.key_up(Keys.SHIFT) + action.perform() + finally: + self.marionette.actions.release() + + def open_pages(self): + """ + Opens all pages with our given configuration. + """ + for _ in range(self.pages_to_load()): + self.open_and_focus() + self.signal_user_active() diff --git a/testing/awsy/awsy/parse_about_memory.py b/testing/awsy/awsy/parse_about_memory.py new file mode 100644 index 0000000000..7e2f4c9c3a --- /dev/null +++ b/testing/awsy/awsy/parse_about_memory.py @@ -0,0 +1,173 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# Firefox about:memory log parser. + +from __future__ import absolute_import, print_function + +import argparse +from collections import defaultdict +import gzip +import json + +# This value comes from nsIMemoryReporter.idl. +KIND_HEAP = 1 + + +def path_total(data, path): + """ + Calculates the sum for the given data point path and its children. If + path does not end with a '/' then only the value for the exact path is + returned. + """ + path_totals = defaultdict(int) + + # Bookkeeping for calculating the heap-unclassified measurement. + explicit_heap = defaultdict(int) + heap_allocated = defaultdict(int) + + discrete = not path.endswith("/") + + def match(value): + """ + Helper that performs either an explicit match or a prefix match + depending on the format of the path passed in. + """ + if discrete: + return value == path + else: + return value.startswith(path) + + def update_bookkeeping(report): + """ + Adds the value to the heap total if this an explicit entry that is a + heap measurement and updates the heap allocated value if necessary. + """ + if report["kind"] == KIND_HEAP and report["path"].startswith("explicit/"): + explicit_heap[report["process"]] += report["amount"] + elif report["path"] == "heap-allocated": + heap_allocated[report["process"]] = report["amount"] + + def heap_unclassified(process): + """ + Calculates the heap-unclassified value for the given process. This is + simply the difference between all values reported as heap allocated + under the explicit/ tree and the value reported for heap-allocated by + the allocator. + """ + # Memory reports should always include heap-allocated. If it's missing + # just assert. + assert process in heap_allocated + + unclassified = heap_allocated[process] - explicit_heap[process] + + # Make sure the value is sane. A misbehaving reporter could lead to + # negative values. + assert unclassified >= 0, "heap-unclassified was negative: %d" % unclassified + + return unclassified + + needs_bookkeeping = path in ("explicit/", "explicit/heap-unclassified") + + # Process all the reports. + for report in data["reports"]: + if needs_bookkeeping: + update_bookkeeping(report) + + if match(report["path"]): + path_totals[report["process"]] += report["amount"] + + # Handle special processing for explicit and heap-unclassified. + if path == "explicit/": + # If 'explicit/' is requested we need to add the 'explicit/heap-unclassified' + # node that is generated by about:memory. 
+ for k, v in explicit_heap.items(): + path_totals[k] += heap_unclassified(k) + elif path == "explicit/heap-unclassified": + # If 'explicit/heap-unclassified' is requested we need to calculate the + # value as it's generated by about:memory, not explicitly reported. + for k, v in explicit_heap.items(): + path_totals[k] = heap_unclassified(k) + + return path_totals + + +def calculate_memory_report_values( + memory_report_path, data_point_path, process_names=None +): + """ + Opens the given memory report file and calculates the value for the given + data point. + + :param memory_report_path: Path to the memory report file to parse. + :param data_point_path: Path of the data point to calculate in the memory + report, ie: 'explicit/heap-unclassified'. + :param process_name: Name of processes to limit reports to. ie 'Main' + """ + try: + with open(memory_report_path) as f: + data = json.load(f) + except ValueError: + # Check if the file is gzipped. + with gzip.open(memory_report_path, "rb") as f: + data = json.load(f) + + totals = path_total(data, data_point_path) + + # If a process name is provided, restricted output to processes matching + # that name. + if process_names is not None: + for k in list(totals.keys()): + if not any([process_name in k for process_name in process_names]): + del totals[k] + + return totals + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Extract data points from about:memory reports" + ) + parser.add_argument("report", action="store", help="Path to a memory report file.") + parser.add_argument( + "prefix", + action="store", + help="Prefix of data point to measure. " + "If the prefix does not end in a '/' " + "then an exact match is made.", + ) + parser.add_argument( + "--proc-filter", + action="store", + nargs="*", + default=None, + help="Process name filter. " "If not provided all processes will be included.", + ) + parser.add_argument( + "--mebi", + action="store_true", + help="Output values as mebibytes (instead of bytes)" " to match about:memory.", + ) + + args = parser.parse_args() + totals = calculate_memory_report_values(args.report, args.prefix, args.proc_filter) + + sorted_totals = sorted(totals.items(), key=lambda item: (-item[1], item[0])) + for (k, v) in sorted_totals: + if v: + print("{0}\t".format(k)), + print("") + + bytes_per_mebibyte = 1024.0 * 1024.0 + for (k, v) in sorted_totals: + if v: + if args.mebi: + print("{0:.2f} MiB".format(v / bytes_per_mebibyte)), + else: + print("{0} bytes".format(v)), + print("\t"), + print("") diff --git a/testing/awsy/awsy/process_perf_data.py b/testing/awsy/awsy/process_perf_data.py new file mode 100644 index 0000000000..614a7af997 --- /dev/null +++ b/testing/awsy/awsy/process_perf_data.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import, division, print_function + +import six +import os +import sys +import json +import math +import glob + +AWSY_PATH = os.path.dirname(os.path.realpath(__file__)) +if AWSY_PATH not in sys.path: + sys.path.append(AWSY_PATH) + +import parse_about_memory + +# A description of each checkpoint and the root path to it. 
+CHECKPOINTS = [ + {"name": "Fresh start", "path": "memory-report-Start-0.json.gz"}, + {"name": "Fresh start [+30s]", "path": "memory-report-StartSettled-0.json.gz"}, + {"name": "After tabs open", "path": "memory-report-TabsOpen-4.json.gz"}, + { + "name": "After tabs open [+30s]", + "path": "memory-report-TabsOpenSettled-4.json.gz", + }, + { + "name": "After tabs open [+30s, forced GC]", + "path": "memory-report-TabsOpenForceGC-4.json.gz", + }, + { + "name": "Tabs closed extra processes", + "path": "memory-report-TabsClosedExtraProcesses-4.json.gz", + }, + {"name": "Tabs closed", "path": "memory-report-TabsClosed-4.json.gz"}, + {"name": "Tabs closed [+30s]", "path": "memory-report-TabsClosedSettled-4.json.gz"}, + { + "name": "Tabs closed [+30s, forced GC]", + "path": "memory-report-TabsClosedForceGC-4.json.gz", + }, +] + +# A description of each perfherder suite and the path to its values. +PERF_SUITES = [ + {"name": "Resident Memory", "node": "resident"}, + {"name": "Explicit Memory", "node": "explicit/"}, + {"name": "Heap Unclassified", "node": "explicit/heap-unclassified"}, + {"name": "JS", "node": "js-main-runtime/"}, + {"name": "Images", "node": "explicit/images/"}, +] + + +def median(values): + sorted_ = sorted(values) + # pylint --py3k W1619 + med = int(len(sorted_) / 2) + + if len(sorted_) % 2: + return sorted_[med] + # pylint --py3k W1619 + return (sorted_[med - 1] + sorted_[med]) / 2 + + +def update_checkpoint_paths(checkpoint_files, checkpoints): + """ + Updates checkpoints with memory report file fetched in data_path + :param checkpoint_files: list of files in data_path + :param checkpoints: The checkpoints to update the path of. + """ + target_path = [ + ["Start-", 0], + ["StartSettled-", 0], + ["TabsOpen-", -1], + ["TabsOpenSettled-", -1], + ["TabsOpenForceGC-", -1], + ["TabsClosedExtraProcesses-", -1], + ["TabsClosed-", -1], + ["TabsClosedSettled-", -1], + ["TabsClosedForceGC-", -1], + ] + for i in range(len(target_path)): + (name, idx) = target_path[i] + paths = sorted([x for x in checkpoint_files if name in x]) + if paths: + indices = [i for i, x in enumerate(checkpoints) if name in x["path"]] + if indices: + checkpoints[indices[0]]["path"] = paths[idx] + else: + print("found files but couldn't find {}".format(name)) + + +def create_suite( + name, node, data_path, checkpoints=CHECKPOINTS, alertThreshold=None, extra_opts=None +): + """ + Creates a suite suitable for adding to a perfherder blob. Calculates the + geometric mean of the checkpoint values and adds that to the suite as + well. + + :param name: The name of the suite. + :param node: The path of the data node to extract data from. + :param data_path: The directory to retrieve data from. + :param checkpoints: Which checkpoints to include. + :param alertThreshold: The percentage of change that triggers an alert. + """ + suite = {"name": name, "subtests": [], "lowerIsBetter": True, "unit": "bytes"} + + if alertThreshold: + suite["alertThreshold"] = alertThreshold + + opts = [] + if extra_opts: + opts.extend(extra_opts) + + # The stylo attributes override each other. 
+ stylo_opt = None + if "STYLO_FORCE_ENABLED" in os.environ and os.environ["STYLO_FORCE_ENABLED"]: + stylo_opt = "stylo" + if "STYLO_THREADS" in os.environ and os.environ["STYLO_THREADS"] == "1": + stylo_opt = "stylo-sequential" + + if stylo_opt: + opts.append(stylo_opt) + + if "DMD" in os.environ and os.environ["DMD"]: + opts.append("dmd") + + if extra_opts: + suite["extraOptions"] = opts + + update_checkpoint_paths( + glob.glob(os.path.join(data_path, "memory-report*")), checkpoints + ) + + total = 0 + for checkpoint in checkpoints: + memory_report_path = os.path.join(data_path, checkpoint["path"]) + + name_filter = checkpoint.get("name_filter", None) + if checkpoint.get("median"): + process = median + else: + process = sum + + if node != "resident": + totals = parse_about_memory.calculate_memory_report_values( + memory_report_path, node, name_filter + ) + value = process(totals.values()) + else: + # For "resident" we really want RSS of the chrome ("Main") process + # and USS of the child processes. We'll still call it resident + # for simplicity (it's nice to be able to compare RSS of non-e10s + # with RSS + USS of e10s). + totals_rss = parse_about_memory.calculate_memory_report_values( + memory_report_path, node, ["Main"] + ) + totals_uss = parse_about_memory.calculate_memory_report_values( + memory_report_path, "resident-unique" + ) + value = list(totals_rss.values())[0] + sum( + [v for k, v in six.iteritems(totals_uss) if "Main" not in k] + ) + + subtest = { + "name": checkpoint["name"], + "value": value, + "lowerIsBetter": True, + "unit": "bytes", + } + suite["subtests"].append(subtest) + total += math.log(subtest["value"]) + + # Add the geometric mean. For more details on the calculation see: + # https://en.wikipedia.org/wiki/Geometric_mean#Relationship_with_arithmetic_mean_of_logarithms + # pylint --py3k W1619 + suite["value"] = math.exp(total / len(checkpoints)) + + return suite + + +def create_perf_data( + data_path, perf_suites=PERF_SUITES, checkpoints=CHECKPOINTS, extra_opts=None +): + """ + Builds up a performance data blob suitable for submitting to perfherder. + """ + if ("GCOV_PREFIX" in os.environ) or ("JS_CODE_COVERAGE_OUTPUT_DIR" in os.environ): + print( + "Code coverage is being collected, performance data will not be gathered." + ) + return {} + + perf_blob = {"framework": {"name": "awsy"}, "suites": []} + + for suite in perf_suites: + perf_blob["suites"].append( + create_suite( + suite["name"], + suite["node"], + data_path, + checkpoints, + suite.get("alertThreshold"), + extra_opts, + ) + ) + + return perf_blob + + +if __name__ == "__main__": + args = sys.argv[1:] + if not args: + print("Usage: process_perf_data.py data_path") + sys.exit(1) + + # Determine which revisions we need to process. + data_path = args[0] + perf_blob = create_perf_data(data_path) + print("PERFHERDER_DATA: {}".format(json.dumps(perf_blob))) + + sys.exit(0) diff --git a/testing/awsy/awsy/test_base_memory_usage.py b/testing/awsy/awsy/test_base_memory_usage.py new file mode 100644 index 0000000000..8ef9934a68 --- /dev/null +++ b/testing/awsy/awsy/test_base_memory_usage.py @@ -0,0 +1,129 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import + +import os +import sys + +AWSY_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +if AWSY_PATH not in sys.path: + sys.path.append(AWSY_PATH) + +from awsy.awsy_test_case import AwsyTestCase + +# A description of each checkpoint and the root path to it. +CHECKPOINTS = [ + { + "name": "After tabs open [+30s, forced GC]", + "path": "memory-report-TabsOpenForceGC-4.json.gz", + "name_filter": ["web ", "Web Content"], # We only want the content process + "median": True, # We want the median from all content processes + }, +] + +# A description of each perfherder suite and the path to its values. +PERF_SUITES = [ + {"name": "Base Content Resident Unique Memory", "node": "resident-unique"}, + {"name": "Base Content Heap Unclassified", "node": "explicit/heap-unclassified"}, + {"name": "Base Content JS", "node": "js-main-runtime/", "alertThreshold": 0.25}, + {"name": "Base Content Explicit", "node": "explicit/"}, +] + + +class TestMemoryUsage(AwsyTestCase): + """ + Provides a base case test that just loads about:memory and reports the + memory usage of a single content process. + """ + + def urls(self): + return self._urls + + def perf_suites(self): + return PERF_SUITES + + def perf_checkpoints(self): + return CHECKPOINTS + + def setUp(self): + AwsyTestCase.setUp(self) + self.logger.info("setting up!") + + # Override AwsyTestCase value, this is always going to be 1 iteration. + self._iterations = 1 + + # Override "entities" from our configuration. + # + # We aim to load a number of about:blank pages exactly matching the + # number of content processes we can have. After this we should no + # longer have a preallocated content process (although to be sure we + # explicitly drop it at the end of the test). + process_count = self.marionette.get_pref("dom.ipc.processCount") + self._pages_to_load = process_count + self._urls = ["about:blank"] * process_count + + self.logger.info( + "areweslimyet run by %d pages, " + "%d iterations, %d perTabPause, %d settleWaitTime, " + "%d content processes" + % ( + self._pages_to_load, + self._iterations, + self._perTabPause, + self._settleWaitTime, + process_count, + ) + ) + self.logger.info("done setting up!") + + def tearDown(self): + self.logger.info("tearing down!") + AwsyTestCase.tearDown(self) + self.logger.info("done tearing down!") + + def set_preallocated_process_enabled_state(self, enabled): + """Sets the pref that controls whether we have a preallocated content + process to the given value. + + This will cause the preallocated process to be destroyed or created + as appropriate. + """ + if enabled: + self.logger.info("re-enabling preallocated process") + else: + self.logger.info("disabling preallocated process") + self.marionette.set_pref("dom.ipc.processPrelaunch.enabled", enabled) + + def test_open_tabs(self): + """Marionette test entry that returns an array of checkpoint arrays. + + This will generate a set of checkpoints for each iteration requested. + Upon successful completion the results will be stored in + |self.testvars["results"]| and accessible to the test runner via the + |testvars| object it passed in. 
+ """ + # setup the results array + results = [[] for _ in range(self.iterations())] + + def create_checkpoint(name, iteration, minimize=False): + checkpoint = self.do_memory_report(name, iteration, minimize) + self.assertIsNotNone(checkpoint, "Checkpoint was recorded") + results[iteration].append(checkpoint) + + # As long as we force the number of iterations to 1 in setUp() above, + # we don't need to loop over this work. + assert self._iterations == 1 + self.open_pages() + self.set_preallocated_process_enabled_state(False) + self.settle() + self.settle() + create_checkpoint("TabsOpenForceGC", 0, minimize=True) + self.set_preallocated_process_enabled_state(True) + # (If we wanted to do something after the preallocated process has been + # recreated, we should call self.settle() again to wait for it.) + + # TODO(ER): Temporary hack until bug 1121139 lands + self.logger.info("setting results") + self.testvars["results"] = results diff --git a/testing/awsy/awsy/test_memory_usage.py b/testing/awsy/awsy/test_memory_usage.py new file mode 100644 index 0000000000..3665187f82 --- /dev/null +++ b/testing/awsy/awsy/test_memory_usage.py @@ -0,0 +1,243 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +import os +import sys +import yaml + +import mozinfo + +from marionette_driver.errors import JavascriptException, ScriptTimeoutException +from mozproxy import get_playback + +AWSY_PATH = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) +if AWSY_PATH not in sys.path: + sys.path.append(AWSY_PATH) + +from awsy import process_perf_data, webservers +from awsy.awsy_test_case import AwsyTestCase + + +class TestMemoryUsage(AwsyTestCase): + """Provides a test that collects memory usage at various checkpoints: + - "Start" - Just after startup + - "StartSettled" - After an additional wait time + - "TabsOpen" - After opening all provided URLs + - "TabsOpenSettled" - After an additional wait time + - "TabsOpenForceGC" - After forcibly invoking garbage collection + - "TabsClosed" - After closing all tabs + - "TabsClosedSettled" - After an additional wait time + - "TabsClosedForceGC" - After forcibly invoking garbage collection + """ + + def urls(self): + return self._urls + + def perf_suites(self): + return process_perf_data.PERF_SUITES + + def perf_checkpoints(self): + return process_perf_data.CHECKPOINTS + + def perf_extra_opts(self): + return self._extra_opts + + def setupTp5(self): + urls = None + default_tp5n_manifest = os.path.join( + self._webroot_dir, "page_load_test", "tp5n", "tp5n.manifest" + ) + tp5n_manifest = self.testvars.get("pageManifest", default_tp5n_manifest) + with open(tp5n_manifest) as fp: + urls = fp.readlines() + # pylint --py3k: W1636 + urls = list(map(lambda x: x.replace("localhost", "localhost:{}"), urls)) + + # We haven't set self._urls yet, so this value might be zero if + # 'entities' wasn't specified. + to_load = self.pages_to_load() + if not to_load: + to_load = len(urls) + self._webservers = webservers.WebServers( + "localhost", 8001, self._webroot_dir, to_load + ) + self._webservers.start() + for url, server in zip(urls, self._webservers.servers): + self._urls.append(url.strip().format(server.port)) + + def setupTp6(self): + # tp5n stores its manifest in the zip file that gets extracted, tp6 + # doesn't so we just keep one in our project dir for now. 
+ default_tp6_pages_manifest = os.path.join(AWSY_PATH, "conf", "tp6-pages.yml") + tp6_pages_manifest = self.testvars.get( + "pageManifest", default_tp6_pages_manifest + ) + urls = [] + recordings = set() + with open(tp6_pages_manifest) as f: + d = yaml.safe_load(f) + for r in d: + recordings.add(r["rec"]) + url = r["url"] + if isinstance(url, list): + urls.extend(url) + else: + urls.append(url) + + self._urls = urls + + # Indicate that we're using tp6 in the perf data. + self._extra_opts = ["tp6"] + + if self.marionette.get_pref("fission.autostart"): + self._extra_opts.append("fission") + + # Now we setup the mitm proxy with our tp6 pageset. + tp6_pageset_manifest = os.path.join(AWSY_PATH, "tp6-pageset.manifest") + config = { + "playback_tool": "mitmproxy", + "playback_version": "4.0.4", + "playback_files": [tp6_pageset_manifest], + "platform": mozinfo.os, + "obj_path": self._webroot_dir, + "binary": self._binary, + "run_local": self._run_local, + "app": "firefox", + "host": "127.0.0.1", + "ignore_mitmdump_exit_failure": True, + } + + self._playback = get_playback(config) + self._playback.start() + + # We need to reload after the mitmproxy cert is installed + self.marionette.restart(clean=False) + + # Setup WebDriver capabilities that we need + self.marionette.delete_session() + caps = { + "unhandledPromptBehavior": "dismiss", # Ignore page navigation warnings + } + self.marionette.start_session(caps) + self.marionette.set_context("chrome") + + def setUp(self): + AwsyTestCase.setUp(self) + self.logger.info("setting up") + self._webroot_dir = self.testvars["webRootDir"] + self._urls = [] + self._extra_opts = None + + if self.testvars.get("tp6", False): + self.setupTp6() + else: + self.setupTp5() + + self.logger.info( + "areweslimyet run by %d pages, %d iterations," + " %d perTabPause, %d settleWaitTime" + % ( + self._pages_to_load, + self._iterations, + self._perTabPause, + self._settleWaitTime, + ) + ) + self.logger.info("done setting up!") + + def tearDown(self): + self.logger.info("tearing down!") + + self.logger.info("tearing down webservers!") + + if self.testvars.get("tp6", False): + self._playback.stop() + else: + self._webservers.stop() + + AwsyTestCase.tearDown(self) + + self.logger.info("done tearing down!") + + def clear_preloaded_browser(self): + """ + Clears out the preloaded browser. + """ + self.logger.info("closing preloaded browser") + script = """ + if (window.NewTabPagePreloading) { + return NewTabPagePreloading.removePreloadedBrowser(window); + } + return "NewTabPagePreloading.removePreloadedBrowser not available"; + """ + try: + result = self.marionette.execute_script(script, script_timeout=180000) + except JavascriptException as e: + self.logger.error("removePreloadedBrowser() JavaScript error: %s" % e) + except ScriptTimeoutException: + self.logger.error("removePreloadedBrowser() timed out") + except Exception: + self.logger.error( + "removePreloadedBrowser() Unexpected error: %s" % sys.exc_info()[0] + ) + else: + if result: + self.logger.info(result) + + def test_open_tabs(self): + """Marionette test entry that returns an array of checkpoint arrays. + + This will generate a set of checkpoints for each iteration requested. + Upon successful completion the results will be stored in + |self.testvars["results"]| and accessible to the test runner via the + |testvars| object it passed in. 
+ """ + # setup the results array + results = [[] for _ in range(self.iterations())] + + def create_checkpoint(name, iteration, minimize=False): + checkpoint = self.do_memory_report(name, iteration, minimize) + self.assertIsNotNone(checkpoint, "Checkpoint was recorded") + results[iteration].append(checkpoint) + + # The first iteration gets Start and StartSettled entries before + # opening tabs + create_checkpoint("Start", 0) + self.settle() + create_checkpoint("StartSettled", 0) + + for itr in range(self.iterations()): + self.open_pages() + + create_checkpoint("TabsOpen", itr) + self.settle() + create_checkpoint("TabsOpenSettled", itr) + create_checkpoint("TabsOpenForceGC", itr, minimize=True) + + # Close all tabs + self.reset_state() + + with self.marionette.using_context("content"): + self.logger.info("navigating to about:blank") + self.marionette.navigate("about:blank") + self.logger.info("navigated to about:blank") + self.signal_user_active() + + # Create checkpoint that may contain retained processes that will + # be reused. + create_checkpoint("TabsClosedExtraProcesses", itr) + + # Clear out the retained processes and measure again. + self.clear_preloaded_browser() + + create_checkpoint("TabsClosed", itr) + self.settle() + create_checkpoint("TabsClosedSettled", itr) + create_checkpoint("TabsClosedForceGC", itr, minimize=True) + + # TODO(ER): Temporary hack until bug 1121139 lands + self.logger.info("setting results") + self.testvars["results"] = results diff --git a/testing/awsy/awsy/webservers.py b/testing/awsy/awsy/webservers.py new file mode 100644 index 0000000000..c6c54b17ca --- /dev/null +++ b/testing/awsy/awsy/webservers.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python + +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + + +# mozhttpd web server. + +from __future__ import absolute_import, print_function + +import argparse +import os +import socket + +import mozhttpd + + +# directory of this file +here = os.path.dirname(os.path.realpath(__file__)) + + +class WebServers(object): + def __init__(self, host, port, docroot, count): + self.host = host + self.port = port + self.docroot = docroot + self.count = count + self.servers = [] + + def start(self): + self.stop() + self.servers = [] + port = self.port + num_errors = 0 + while len(self.servers) < self.count: + self.servers.append( + mozhttpd.MozHttpd(host=self.host, port=port, docroot=self.docroot) + ) + try: + self.servers[-1].start() + except socket.error as error: + if isinstance(error, socket.error): + if error.errno == 98: + print("port {} is in use.".format(port)) + else: + print("port {} error {}".format(port, error)) + elif isinstance(error, str): + print("port {} error {}".format(port, error)) + self.servers.pop() + num_errors += 1 + except Exception as error: + print("port {} error {}".format(port, error)) + self.servers.pop() + num_errors += 1 + + if num_errors > 15: + raise Exception("Too many errors in webservers.py") + port += 1 + + def stop(self): + while len(self.servers) > 0: + server = self.servers.pop() + server.stop() + + +def main(): + parser = argparse.ArgumentParser( + description="Start mozhttpd servers for use by areweslimyet." + ) + + parser.add_argument( + "--port", + type=int, + default=8001, + help="Starting port. Defaults to 8001. 
Web servers will be " + "created for each port from the starting port to starting port " + "+ count - 1.", + ) + parser.add_argument( + "--count", + type=int, + default=100, + help="Number of web servers to start. Defaults to 100.", + ) + parser.add_argument( + "--host", + type=str, + default="localhost", + help="Name of webserver host. Defaults to localhost.", + ) + + args = parser.parse_args() + web_servers = WebServers(args.host, args.port, "%s/html" % here, args.count) + web_servers.start() + + +if __name__ == "__main__": + main() diff --git a/testing/awsy/conf/base-prefs.json b/testing/awsy/conf/base-prefs.json new file mode 100644 index 0000000000..f3874c720a --- /dev/null +++ b/testing/awsy/conf/base-prefs.json @@ -0,0 +1,14 @@ +{ + "browser.tabs.remote.separatePrivilegedContentProcess": true, + "javascript.options.asyncstack": false, + "image.mem.surfacecache.min_expiration_ms": 10000, + "network.proxy.socks": "localhost", + "network.proxy.socks_port": 90000, + "network.proxy.socks_remote_dns": true, + "network.proxy.type": 1, + "plugin.disable": true, + "startup.homepage_override_url": "", + "startup.homepage_welcome_url": "", + "browser.startup.homepage": "about:blank", + "browser.newtabpage.enabled": false +} diff --git a/testing/awsy/conf/base-testvars.json b/testing/awsy/conf/base-testvars.json new file mode 100644 index 0000000000..de6c6f9be8 --- /dev/null +++ b/testing/awsy/conf/base-testvars.json @@ -0,0 +1,5 @@ +{ + "iterations": 1, + "perTabPause": 10, + "settleWaitTime": 60 +} diff --git a/testing/awsy/conf/prefs.json b/testing/awsy/conf/prefs.json new file mode 100644 index 0000000000..d2d3f0895e --- /dev/null +++ b/testing/awsy/conf/prefs.json @@ -0,0 +1,13 @@ +{ + "browser.newtabpage.enabled": true, + "browser.tabs.remote.separatePrivilegedContentProcess": true, + "javascript.options.asyncstack": false, + "image.mem.surfacecache.min_expiration_ms": 10000, + "network.proxy.socks": "localhost", + "network.proxy.socks_port": 90000, + "network.proxy.socks_remote_dns": true, + "network.proxy.type": 1, + "plugin.disable": true, + "startup.homepage_override_url": "", + "startup.homepage_welcome_url": "" +} diff --git a/testing/awsy/conf/testvars.json b/testing/awsy/conf/testvars.json new file mode 100644 index 0000000000..454f583340 --- /dev/null +++ b/testing/awsy/conf/testvars.json @@ -0,0 +1,6 @@ +{ + "entities": 100, + "iterations": 3, + "perTabPause": 10, + "settleWaitTime": 30 +} diff --git a/testing/awsy/conf/tp6-pages.yml b/testing/awsy/conf/tp6-pages.yml new file mode 100644 index 0000000000..0565b4d549 --- /dev/null +++ b/testing/awsy/conf/tp6-pages.yml @@ -0,0 +1,50 @@ +- rec: fandom.mp + url: https://www.fandom.com/articles/fallout-76-will-live-and-die-on-the-creativity-of-its-playerbase +- rec: google-docs.mp + url: https://docs.google.com/document/d/1US-07msg12slQtI_xchzYxcKlTs6Fp7WqIc6W5GK5M8/edit?usp=sharing +- rec: google-slides.mp + url: https://docs.google.com/presentation/d/1Ici0ceWwpFvmIb3EmKeWSq_vAQdmmdFcWqaiLqUkJng/edit?usp=sharing +- rec: google-sheets.mp + url: https://docs.google.com/spreadsheets/d/1jT9qfZFAeqNoOK97gruc34Zb7y_Q-O_drZ8kSXT-4D4/edit?usp=sharing +- rec: wikipedia.mp + url: https://en.wikipedia.org/wiki/Barack_Obama +- rec: imgur.mp + url: https://imgur.com/gallery/m5tYJL6 +- rec: google-mail.mp + url: https://mail.google.com/ +- rec: yahoo-mail.mp + url: https://mail.yahoo.com/ +- rec: pinterest.mp + url: https://pinterest.com/ +- rec: twitter.mp + url: https://twitter.com/BarackObama +- rec: amazon.mp + url: 
https://www.amazon.com/s?k=laptop&ref=nb_sb_noss_1 +- rec: apple.mp + url: https://www.apple.com/macbook-pro/ +- rec: bing.mp + url: https://www.bing.com/search?q=barack+obama +- rec: ebay.mp + url: https://www.ebay.com/ +- rec: facebook.mp + url: https://www.facebook.com +- rec: google-search.mp + url: https://www.google.com/search?hl=en&q=barack+obama&cad=h +- rec: imdb.mp + url: https://www.imdb.com/title/tt0084967/?ref_=nv_sr_2 +- rec: instagram.mp + url: https://www.instagram.com/ +- rec: microsoft.mp + url: https://www.microsoft.com/en-us/ +- rec: paypal.mp + url: https://www.paypal.com/myaccount/summary/ +- rec: reddit.mp + url: https://www.reddit.com/r/technology/comments/9sqwyh/we_posed_as_100_senators_to_run_ads_on_facebook/ +- rec: tumblr.mp + url: https://www.tumblr.com/dashboard +- rec: yahoo-news.mp + url: https://www.yahoo.com/lifestyle/police-respond-noise-complaint-end-playing-video-games-respectful-tenants-002329963.html +- rec: youtube.mp + url: https://www.youtube.com +- rec: yandex.mp + url: https://yandex.ru/search/?text=barack%20obama&lr=10115 diff --git a/testing/awsy/conf/tp6-prefs.json b/testing/awsy/conf/tp6-prefs.json new file mode 100644 index 0000000000..e7b84cc856 --- /dev/null +++ b/testing/awsy/conf/tp6-prefs.json @@ -0,0 +1,15 @@ +{ + "browser.newtabpage.enabled": true, + "browser.tabs.remote.separatePrivilegedContentProcess": true, + "javascript.options.asyncstack": false, + "image.mem.surfacecache.min_expiration_ms": 10000, + "network.proxy.http": "localhost", + "network.proxy.http_port": 8080, + "network.proxy.ssl": "localhost", + "network.proxy.ssl_port": 8080, + "network.proxy.no_proxies_on": "localhost", + "network.proxy.type": 1, + "plugin.disable": true, + "startup.homepage_override_url": "", + "startup.homepage_welcome_url": "" +} diff --git a/testing/awsy/conf/tp6-testvars.json b/testing/awsy/conf/tp6-testvars.json new file mode 100644 index 0000000000..218f5aefea --- /dev/null +++ b/testing/awsy/conf/tp6-testvars.json @@ -0,0 +1,5 @@ +{ + "iterations": 1, + "perTabPause": 15, + "settleWaitTime": 30 +} diff --git a/testing/awsy/mach_commands.py b/testing/awsy/mach_commands.py new file mode 100644 index 0000000000..1ca974bb0a --- /dev/null +++ b/testing/awsy/mach_commands.py @@ -0,0 +1,354 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +from __future__ import absolute_import, unicode_literals + +import argparse +import logging +import os +import sys + +import six + +from mozbuild.base import ( + MachCommandBase, + MachCommandConditions as conditions, + BinaryNotFoundException, +) + +from mach.decorators import ( + CommandArgument, + CommandArgumentGroup, + CommandProvider, + Command, +) + +import mozinfo + + +def setup_awsy_argument_parser(): + from marionette_harness.runtests import MarionetteArguments + from mozlog.structured import commandline + + parser = MarionetteArguments() + commandline.add_logging_group(parser) + + return parser + + +@CommandProvider +class MachCommands(MachCommandBase): + AWSY_PATH = os.path.dirname(os.path.realpath(__file__)) + if AWSY_PATH not in sys.path: + sys.path.append(AWSY_PATH) + from awsy import ITERATIONS, PER_TAB_PAUSE, SETTLE_WAIT_TIME, MAX_TABS + + def run_awsy(self, tests, binary=None, **kwargs): + import json + from mozlog.structured import commandline + + from marionette_harness.runtests import MarionetteTestRunner, MarionetteHarness + + parser = setup_awsy_argument_parser() + + awsy_source_dir = os.path.join(self.topsrcdir, "testing", "awsy") + if not tests: + tests = [os.path.join(awsy_source_dir, "awsy", "test_memory_usage.py")] + + args = argparse.Namespace(tests=tests) + + args.binary = binary + + if kwargs["quick"]: + kwargs["entities"] = 3 + kwargs["iterations"] = 1 + kwargs["perTabPause"] = 1 + kwargs["settleWaitTime"] = 1 + + if "single_stylo_traversal" in kwargs and kwargs["single_stylo_traversal"]: + os.environ["STYLO_THREADS"] = "1" + else: + os.environ["STYLO_THREADS"] = "4" + + runtime_testvars = {} + for arg in ( + "webRootDir", + "pageManifest", + "resultsDir", + "entities", + "iterations", + "perTabPause", + "settleWaitTime", + "maxTabs", + "dmd", + "tp6", + ): + if arg in kwargs and kwargs[arg] is not None: + runtime_testvars[arg] = kwargs[arg] + + if "webRootDir" not in runtime_testvars: + awsy_tests_dir = os.path.join(self.topobjdir, "_tests", "awsy") + web_root_dir = os.path.join(awsy_tests_dir, "html") + runtime_testvars["webRootDir"] = web_root_dir + else: + web_root_dir = runtime_testvars["webRootDir"] + awsy_tests_dir = os.path.dirname(web_root_dir) + + if "resultsDir" not in runtime_testvars: + runtime_testvars["resultsDir"] = os.path.join(awsy_tests_dir, "results") + + runtime_testvars["bin"] = binary + runtime_testvars["run_local"] = True + + page_load_test_dir = os.path.join(web_root_dir, "page_load_test") + if not os.path.isdir(page_load_test_dir): + os.makedirs(page_load_test_dir) + + if not os.path.isdir(runtime_testvars["resultsDir"]): + os.makedirs(runtime_testvars["resultsDir"]) + + runtime_testvars_path = os.path.join(awsy_tests_dir, "runtime-testvars.json") + if kwargs["testvars"]: + kwargs["testvars"].append(runtime_testvars_path) + else: + kwargs["testvars"] = [runtime_testvars_path] + + runtime_testvars_file = open(runtime_testvars_path, "wb" if six.PY2 else "w") + runtime_testvars_file.write(json.dumps(runtime_testvars, indent=2)) + runtime_testvars_file.close() + + manifest_file = os.path.join(awsy_source_dir, "tp5n-pageset.manifest") + tooltool_args = { + "args": [ + sys.executable, + os.path.join(self.topsrcdir, "mach"), + "artifact", + "toolchain", + "-v", + "--tooltool-manifest=%s" % manifest_file, + "--cache-dir=%s" % os.path.join(self.topsrcdir, "tooltool-cache"), + ] + } + self.run_process(cwd=page_load_test_dir, **tooltool_args) + tp5nzip = os.path.join(page_load_test_dir, "tp5n.zip") + tp5nmanifest = 
os.path.join(page_load_test_dir, "tp5n", "tp5n.manifest") + if not os.path.exists(tp5nmanifest): + unzip_args = { + "args": ["unzip", "-q", "-o", tp5nzip, "-d", page_load_test_dir] + } + try: + self.run_process(**unzip_args) + except Exception as exc: + troubleshoot = "" + if mozinfo.os == "win": + troubleshoot = ( + " Try using --web-root to specify a " + "directory closer to the drive root." + ) + + self.log( + logging.ERROR, + "awsy", + {"directory": page_load_test_dir, "exception": exc}, + "Failed to unzip `tp5n.zip` into " + "`{directory}` with `{exception}`." + troubleshoot, + ) + raise exc + + # If '--preferences' was not specified supply our default set. + if not kwargs["prefs_files"]: + kwargs["prefs_files"] = [ + os.path.join(awsy_source_dir, "conf", "prefs.json") + ] + + # Setup DMD env vars if necessary. + if kwargs["dmd"]: + bin_dir = os.path.dirname(binary) + + if "DMD" not in os.environ: + os.environ["DMD"] = "1" + + # Work around a startup crash with DMD on windows + if mozinfo.os == "win": + kwargs["pref"] = "security.sandbox.content.level:0" + self.log( + logging.WARNING, + "awsy", + {}, + "Forcing 'security.sandbox.content.level' = 0 because DMD is enabled.", + ) + elif mozinfo.os == "mac": + # On mac binary is in MacOS and dmd.py is in Resources, ie: + # Name.app/Contents/MacOS/libdmd.dylib + # Name.app/Contents/Resources/dmd.py + bin_dir = os.path.join(bin_dir, "../Resources/") + + # Also add the bin dir to the python path so we can use dmd.py + if bin_dir not in sys.path: + sys.path.append(bin_dir) + + for k, v in six.iteritems(kwargs): + setattr(args, k, v) + + parser.verify_usage(args) + + args.logger = commandline.setup_logging( + "Are We Slim Yet Tests", args, {"mach": sys.stdout} + ) + failed = MarionetteHarness(MarionetteTestRunner, args=vars(args)).run() + if failed > 0: + return 1 + else: + return 0 + + @Command( + "awsy-test", + category="testing", + description="Run Are We Slim Yet (AWSY) memory usage testing using marionette.", + parser=setup_awsy_argument_parser, + ) + @CommandArgumentGroup("AWSY") + @CommandArgument( + "--web-root", + group="AWSY", + action="store", + type=str, + dest="webRootDir", + help="Path to web server root directory. If not specified, " + "defaults to topobjdir/_tests/awsy/html.", + ) + @CommandArgument( + "--page-manifest", + group="AWSY", + action="store", + type=str, + dest="pageManifest", + help="Path to page manifest text file containing a list " + "of urls to test. The urls must be served from localhost. If not " + "specified, defaults to page_load_test/tp5n/tp5n.manifest under " + "the web root.", + ) + @CommandArgument( + "--results", + group="AWSY", + action="store", + type=str, + dest="resultsDir", + help="Path to results directory. If not specified, defaults " + "to the parent directory of the web root.", + ) + @CommandArgument( + "--quick", + group="AWSY", + action="store_true", + dest="quick", + default=False, + help="Set --entities=3, --iterations=1, --per-tab-pause=1, " + "--settle-wait-time=1 for a quick test. Overrides any explicit " + "argument settings.", + ) + @CommandArgument( + "--entities", + group="AWSY", + action="store", + type=int, + dest="entities", + help="Number of urls to load. Defaults to the total number of " "urls.", + ) + @CommandArgument( + "--max-tabs", + group="AWSY", + action="store", + type=int, + dest="maxTabs", + help="Maximum number of tabs to open. " "Defaults to %s." 
% MAX_TABS, + ) + @CommandArgument( + "--iterations", + group="AWSY", + action="store", + type=int, + dest="iterations", + help="Number of times to run through the test suite. " + "Defaults to %s." % ITERATIONS, + ) + @CommandArgument( + "--per-tab-pause", + group="AWSY", + action="store", + type=int, + dest="perTabPause", + help="Seconds to wait in between opening tabs. " + "Defaults to %s." % PER_TAB_PAUSE, + ) + @CommandArgument( + "--settle-wait-time", + group="AWSY", + action="store", + type=int, + dest="settleWaitTime", + help="Seconds to wait for things to settled down. " + "Defaults to %s." % SETTLE_WAIT_TIME, + ) + @CommandArgument( + "--dmd", + group="AWSY", + action="store_true", + dest="dmd", + default=False, + help="Enable DMD during testing. Requires a DMD-enabled build.", + ) + @CommandArgument( + "--tp6", + group="AWSY", + action="store_true", + dest="tp6", + default=False, + help="Use the tp6 pageset during testing.", + ) + def run_awsy_test(self, tests, **kwargs): + """mach awsy-test runs the in-tree version of the Are We Slim Yet + (AWSY) tests. + + awsy-test is implemented as a marionette test and marionette + test arguments also apply although they are not necessary + since reasonable defaults will be chosen. + + The AWSY specific arguments can be found in the Command + Arguments for AWSY section below. + + awsy-test will automatically download the tp5n.zip talos + pageset from tooltool and install it under + topobjdir/_tests/awsy/html. You can specify your own page set + by specifying --web-root and --page-manifest. + + The results of the test will be placed in the results + directory specified by the --results argument. + + On Windows, you may experience problems due to path length + errors when extracting the tp5n.zip file containing the + test pages or when attempting to write checkpoints to the + results directory. In that case, you should specify both + the --web-root and --results arguments pointing to a location + with a short path. For example: + + --web-root=c:\\\\tmp\\\\html --results=c:\\\\tmp\\\\results + + Note that the double backslashes are required. + """ + kwargs["logger_name"] = "Awsy Tests" + if "test_objects" in kwargs: + tests = [] + for obj in kwargs["test_objects"]: + tests.append(obj["file_relpath"]) + del kwargs["test_objects"] + + if not kwargs.get("binary") and conditions.is_firefox(self): + try: + kwargs["binary"] = self.get_binary_path("app") + except BinaryNotFoundException as e: + self.log(logging.ERROR, "awsy", {"error": str(e)}, "ERROR: {error}") + self.log(logging.INFO, "awsy", {"help": e.help()}, "{help}") + return 1 + return self.run_awsy(tests, **kwargs) diff --git a/testing/awsy/moz.build b/testing/awsy/moz.build new file mode 100644 index 0000000000..b3360f0f4b --- /dev/null +++ b/testing/awsy/moz.build @@ -0,0 +1,9 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +with Files("awsy/**"): + BUG_COMPONENT = ("Testing", "AWSY") + SCHEDULES.exclusive = ["awsy"] diff --git a/testing/awsy/requirements.txt b/testing/awsy/requirements.txt new file mode 100644 index 0000000000..4ab234e85f --- /dev/null +++ b/testing/awsy/requirements.txt @@ -0,0 +1,2 @@ +marionette-harness >= 4.0.0 +PyYaml >= 5.1 diff --git a/testing/awsy/setup.py b/testing/awsy/setup.py new file mode 100644 index 0000000000..61dd98ff78 --- /dev/null +++ b/testing/awsy/setup.py @@ -0,0 +1,31 @@ +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this file, +# You can obtain one at http://mozilla.org/MPL/2.0/. + +from __future__ import absolute_import + +from setuptools import setup, find_packages + +PACKAGE_NAME = "awsy" +PACKAGE_VERSION = "0.0.1" + +setup( + name=PACKAGE_NAME, + version=PACKAGE_VERSION, + description="AreWeSlimYet", + long_description="A memory testing framework for Firefox.", + author="Mozilla Automation and Testing Team", + author_email="tools@lists.mozilla.org", + license="MPL 1.1/GPL 2.0/LGPL 2.1", + packages=find_packages(), + zip_safe=False, + install_requires=["marionette_harness", "PyYaml"], + classifiers=[ + "Development Status :: 4 - Beta", + "Environment :: Console", + "Intended Audience :: Developers", + "License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)", + "Operating System :: OS Independent", + "Topic :: Software Development :: Libraries :: Python Modules", + ], +) diff --git a/testing/awsy/tp5n-pageset.manifest b/testing/awsy/tp5n-pageset.manifest new file mode 100644 index 0000000000..c785df7a45 --- /dev/null +++ b/testing/awsy/tp5n-pageset.manifest @@ -0,0 +1,10 @@ +[ + { + "filename": "tp5n.zip", + "size": 81753769, + "digest": "7e74bc532d220fa2484f84bd7c2659da7d2ae3aa0bc225ba63e3db70dc0c0697503427209098afa85e235397c4ec58cd488cab7b3435e8079583d3994fff8326", + "algorithm": "sha512", + "unpack": false + } +] + diff --git a/testing/awsy/tp6-pageset.manifest b/testing/awsy/tp6-pageset.manifest new file mode 100644 index 0000000000..533431bfec --- /dev/null +++ b/testing/awsy/tp6-pageset.manifest @@ -0,0 +1,212 @@ +[ + { + "size": 1505360, + "visibility": "public", + "digest": "89a93e65ae36b3c8b53bfadba2f318736bfe9073839241731df20610607245a530bfc6118d27b0244bea0a31d4e1ad03d95f8125a170c89d70e2dd8e506eb1a4", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-amazon.zip" + }, + { + "size": 14244293, + "visibility": "public", + "digest": "04c8a440e56868d01878e2c2b1e7b096ac4f39f9c113a3bc3e9dcab52359abff408afb4d5ca4ba984735dacf5af815c4a4d71f939026a91817654fb55011d38f", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-apple.zip" + }, + { + "size": 181484, + "visibility": "public", + "digest": "94cf72eef8ca4c90f3ea8517db848510d22fc98599a5aa63435f519544f25d8de94c826f0c5bc2b6ea8dd2d31916278e5dcb8cf32a63033ad2ce0c3a09505a6a", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-bing-search.zip" + }, + { + "size": 2455205, + "visibility": "public", + "digest": "44f5a9baf7fc27484d9450f6625e9afbbd9f12f2c80c7ebfad8821f2fed69c7744c131f4e18257c77d24e82cf004abd2e761f774b000efb57fb5d15565a01dce", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-ebay.zip" + }, + { + "size": 5646483, + "visibility": "public", + "digest": "3f4c11d8003278b5b13ad3f2ed1adaf2a33e0b645844202d8371ee669e00d36f0ae09814ffda3632011bd47e8202cc8ef23757e5d72a6d05a37a49a1ad59413e", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-facebook.zip" + }, + { + "size": 
6175607, + "visibility": "public", + "digest": "588dc72d7e853e7b3bf51858b1e48445b70564a24e245bd361075ac83e64d65593fdbc87831fd4ea02805cb0442af754178c6011475fb0a6d4a61001fe76523a", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-fandom.zip" + }, + { + "size": 56817901, + "visibility": "public", + "digest": "5bad8693dbef7cc12ec1a735f80596158948cdc18e522100562e32ed7a286e95909cf8275fac2d46ebf4e9cc84b3c28e59e78973d4282613c5aad60a2a3892ad", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-google-docs.zip" + }, + { + "size": 8188029, + "visibility": "public", + "digest": "e4bf46d3676e0a9b101ef942f97186258289e448c85c9ba296c7faec3e00581a4da9bf35f75b4124cecb69e4933f19f37412688812dfd68cc625b18ccd8a05ae", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-google-mail.zip" + }, + { + "size": 3860610, + "visibility": "public", + "digest": "45ee7d7f4af86c32edfe1fc57f6a5fd6478209409dd244bd8349dbad5e538061b6f94093c89520e0bffb77ac5d628c7ae60cb50428d3130192db9f8ac895c2a7", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-google-search.zip" + }, + { + "size": 12726066, + "visibility": "public", + "digest": "904dfb7b604875e196c46ce5677f2ca419ddf2804f3575b8fd6ca49bd47cc066e298d123023a0b254c59f473507bdafef46cbb21b2f0592ec3780c40bb5d7dca", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-google-sheets.zip" + }, + { + "size": 18308375, + "visibility": "public", + "digest": "af4b0c9985dec29b9d54e765771546161fb770a2573be7a287f2fb29dddc1a9e85765810cffd79d585952d71d6e5d45a6ca13ffdad2b4271c79ac569ea2f7704", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-google-slides.zip" + }, + { + "size": 1916181, + "visibility": "public", + "digest": "f44c735e5b69f6f17c55dd98b5af3a762ecb4e27acd79673f05704617da9f0e8bd6d4f657223ebcdb0b5cdcbe62dd40646cbd1d173aaaa1b6332bc8397aae2bb", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-imdb.zip" + }, + { + "size": 7551825, + "visibility": "public", + "digest": "a04d8141c28abde302cc8c2329e71e9d38ca6f1458f0e38161b94c17400b80900329ef26230552339ffbc93df497b4fe3feda90226fb08215031f880fc76ad51", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-imgur.zip" + }, + { + "size": 4210445, + "visibility": "public", + "digest": "62f980b5f1a1d6f896667ae5480d9637bd7dd2c059abc8a449c7212cded3a5078450c804c50c65fa6328eeadd4edd04536b4a9be0bc634f8d538c088c1410684", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-instagram.zip" + }, + { + "size": 5041497, + "visibility": "public", + "digest": "bd3831953cdefc26c28e298830c4ccc14fcbfb1e42b190ddfb4dd75c96a5fc946c9560c4411bf232b1e4046347258c1bfe43db88b461e8a1b9de5a8cbfe50c9c", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-linkedin.zip" + }, + { + "size": 2886896, + "visibility": "public", + "digest": "af40b89006f6e457f0c6afa7e20fe176181de8ddf0c11c76dc4b5b1c8bbfb74a2bd0214a76c5d5f6a73b74637641bb01ff77e6abdb5fba6833b2d6af225a83c2", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-microsoft.zip" + }, + { + "size": 63313140, + "visibility": "public", + "digest": "24236b7cb2b6f5656fa87173b887801dd3ee1a6dd43d9cabf055735813aaf3975ac1daf743f3dd281c4d0f93fa32900377b17f3711e38b32d90f7271b3f118c1", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-netflix.zip" + }, + { + "size": 3094726, + "visibility": "public", + "digest": "3582326ba319f583f86480176bfc039d9b252374acc3b19f215dedb284340ee161e0fb890e4b90d9f29da56b5448044edc5247661a9ebadccfecb03340ee4249", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-office.zip" + }, + { + "size": 
10091794, + "visibility": "public", + "digest": "4ab0e3defec98b227f1685f97246848d7662897cb6a1e83ae657f82b1ca17de7c69ac2e764def36f3dcf4f4c372b5a0410f1bb9d55e55ec046b6f96c183db42a", + "algorithm": "sha512", + "filename": "firefox-outlook.zip" + }, + { + "size": 5110282, + "visibility": "public", + "digest": "37dfef70b9fd8f9038084fc35878f82f67b8a96ca35b1d9d899813a87df513a591a92aa0e20017d40ea2de91b4b409a191eaa872680335bf67bae492705996ea", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-paypal.zip" + }, + { + "size": 22153229, + "visibility": "public", + "digest": "92dcecc7a3862b3e6ef1ec5e6c025726d81e4b7ef2107043c64bf342a1ab6dd9d4f4db301f6b563d0ac64a530faa954c4f1f877fb593a2cf800734f71c58d07d", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-pinterest.zip" + }, + { + "size": 3622625, + "visibility": "public", + "digest": "7c90d685d5a0e3e4a50e16747255ebed66718e527aaf63f13c7d1b5ad6d2d158ebc90bf8736d66617d9a59ede6ff9bd6cca3ee6f016045d20203750c4980ed79", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-reddit.zip" + }, + { + "size": 20028158, + "visibility": "public", + "digest": "2829c998c429ee03113473bb21f09f6d7b2fdaa199a009fac20cc8ebd073b2379ff7b15b286fac26ce7536967be31bb54451a582f9163bb29869cb8e43f8c9c3", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-tumblr.zip" + }, + { + "size": 16839627, + "visibility": "public", + "digest": "aeac3c3689c0ac7e6993025f51c89750e90a6084680d0c15d63e21b96739484c960ecd3bf34d30949d60aa8edf88862394786fe9499f2d68b46055373723f388", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-twitch.zip" + }, + { + "size": 6590856, + "visibility": "public", + "digest": "51c3068ed8061baec1c826634d66b2db6daf35f0fb3984b9bbbdfa7c6681f59532ac1ba797de6bb123a2f0caba5caf2a7c5b2690685de82d43269450da5b7477", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-twitter.zip" + }, + { + "size": 1336433, + "visibility": "public", + "digest": "c574a872a93278b99bc8012a127b91acdcad300172b22699074a0d68180d7f134b7f4963d7f8a54d95cd034aad634a8f5de2bbeb12d8963d93089364cded77eb", + "algorithm": "sha512", + "filename": "firefox-wikipedia.zip" + }, + { + "size": 4179595, + "visibility": "public", + "digest": "def5d6c77d083a841a5ca203f453590c862b88205b68b3e623e2f0a5223e3afd15e275b6cf950ec135be20dc9615caddb032a5538819f56395e6058d7b03ba71", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-yahoo-mail.zip" + }, + { + "size": 4769089, + "visibility": "public", + "digest": "2b13c1211704d5b1f9ff5a450c6dd07b5d312a4497d69a1b408759c8ba9f8dda821f0b7d975d00d5ebbde0f9517c4a5f002489aa2127300e29db1610948321e4", + "algorithm": "sha512", + "filename": "mitmproxy-tp6-yahoo-news.zip" + }, + { + "size": 1099328, + "visibility": "public", + "digest": "628d7026ff9d45592342300df326be7126fbfa300a0664cac8d789ac58b4f90c13b73502029d932120413d9957fef4fa2d5ff2971c2c2acd6ab3c2c4c5e67bcf", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-yandex.zip" + }, + { + "size": 4828567, + "visibility": "public", + "digest": "eaca3d7f82de7e48332342db5ac18620d76f0568fa553c3ae12b631b35763e29eaee487f2c17179b987ffe192e01da6fae89f7f88f0e6adfb31a2b081a66e3c6", + "algorithm": "sha512", + "filename": "mitm4-linux-firefox-youtube.zip" + } +] |
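
Both pageset manifests above use the tooltool record format: each entry names an archive together with its byte size and sha512 digest so the download can be verified before it is unpacked. As a rough, illustrative sketch only (not the actual tooltool implementation), a record from one of these manifests could be checked like this:

import hashlib
import json
import os


def verify_record(record, directory="."):
    # `record` is one entry from tp5n-pageset.manifest or tp6-pageset.manifest,
    # e.g. {"filename": ..., "size": ..., "digest": ..., "algorithm": "sha512"}.
    path = os.path.join(directory, record["filename"])
    # First compare the cheap size field, then the full cryptographic digest.
    if os.path.getsize(path) != record["size"]:
        return False
    hasher = hashlib.new(record["algorithm"])
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            hasher.update(chunk)
    return hasher.hexdigest() == record["digest"]


if __name__ == "__main__":
    # Assumes the manifest and the downloaded archives sit in the current directory.
    with open("tp5n-pageset.manifest") as f:
        for record in json.load(f):
            print(record["filename"], "ok" if verify_record(record) else "MISMATCH")

In practice, `mach awsy-test` fetches and verifies these archives through tooltool automatically; the sketch is only meant to clarify what the size and digest fields in the manifests are for.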