path: root/python/mozperftest/mozperftest/metrics/consoleoutput.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os

from mozperftest.layers import Layer
from mozperftest.metrics.common import COMMON_ARGS, filtered_metrics

RESULTS_TEMPLATE = """\

==========================================================
                    Results ({})
==========================================================

{}

"""


class ConsoleOutput(Layer):
    """Output metrics in the console."""

    name = "console"
    # By default activate the console layer when running locally.
    activated = "MOZ_AUTOMATION" not in os.environ
    arguments = COMMON_ARGS

    def run(self, metadata):
        # Get filtered metrics
        results = filtered_metrics(
            metadata,
            self.get_arg("output"),
            self.get_arg("prefix"),
            metrics=self.get_arg("metrics"),
            transformer=self.get_arg("transformer"),
            split_by=self.get_arg("split-by"),
            simplify_names=self.get_arg("simplify-names"),
            simplify_exclude=self.get_arg("simplify-exclude"),
        )

        if not results:
            self.warning("No results left after filtering")
            return metadata

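        # Each entry in `results` maps a result name to a list of dicts that
        # carry a "subtest" label and a "data" list of {"value": ...} items.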
        for name, res in results.items():
            # Make a nicer view of the data
            subtests = [
                "{}: {}".format(r["subtest"], [v["value"] for v in r["data"]])
                for r in res
            ]
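            # e.g. subtests == ["firstPaint: [101, 98]"] for a hypothetical
            # entry {"subtest": "firstPaint", "data": [{"value": 101}, {"value": 98}]}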

            # Output the data to the console, reusing the header template above
            self.info(RESULTS_TEMPLATE.format(name, "\n".join(subtests)))
        return metadata